Experiment 1: Getting heatmaps

In [ ]:
%reset -f
%load_ext autoreload
%autoreload 2
from additive.utility import *
from additive.features import *
In [ ]:
client = Client("tcp://10.142.0.26:8786")
client.restart()
In [ ]:
files = ['/home/ben_rasoolov/additive_data/experiment_03/V17_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info']
In [ ]:
import cv2
def get_top_and_bottom(x):
    # Invert a dilated copy of the binary mask so the background becomes the foreground.
    mask = 1 - cv2.dilate(x.astype('uint8'), np.ones((40, 40), np.uint8))
    # OpenCV 3.x findContours returns (image, contours, hierarchy).
    _, contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    # Keep the two largest contours, assumed to be the top and bottom regions.
    good_contours = sorted(contours, key=lambda c: -len(c))[:2]
    print([len(c) for c in good_contours])
    imt = np.zeros_like(mask)
    cv2.drawContours(imt, good_contours, -1, 1, -1)
    plt.imshow(imt)
    plt.show()
    return imt
In [ ]:
from additive.utility import dfe
from additive.preprocessing import load_and_process_image
def get_heatmap(file_name):
    d, f, e = dfe(file_name)
    image = load_and_process_image(file_name, transform_fun=lambda x: x['value'].x)
    sub_image_whole = image
    # Statistics from the central region only, to avoid edge artifacts.
    mu = sub_image_whole[500:-500, 500:-500].mean()
    std = sub_image_whole[500:-500, 500:-500].std()
    # Flatten everything outside the [mu-4*std, mu-std] band to mu-std so the
    # colormap's dynamic range is spent on that band.
    cond = (sub_image_whole > mu - std) | (sub_image_whole < mu - 4*std)
    sub_image_whole[np.where(cond)] = mu - std
    #sub_image_whole[np.where(get_top_and_bottom(cond))] = mu
    plt.imsave(d + f + '.png', sub_image_whole[500:-500, 100:-100], cmap='jet')
In [ ]:
from multiprocessing import Pool
with dask.config.set(pool=Pool(8)):
    bag.from_sequence(files).map(get_heatmap).compute()
In [ ]:
files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*png")
for file in files:
    image = cv2.imread(file)
    image2 = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
    cv2.imwrite(file, image2)
In [ ]:
fig, axes = plt.subplots(figsize=(10, 10))
files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*png")
file = np.random.choice(files)
plt.imshow(cv2.imread(file))

Experiment 2: Getting 3D surface

In [36]:
%reset -f
%load_ext autoreload
%autoreload 2
from additive.utility import *
from additive.features import *
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [37]:
files = [
 '../data/experiment_03/V17_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V11_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V12_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V13_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V13_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V18_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V17_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V11_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V12_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V14_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V18_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info',
 '../data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V14_T2_Right(Top)_500X_3D.info',
 '../data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info']
In [38]:
file = np.random.choice(files)
file = '../data/experiment_03/V11_T1_Right(Top)_500X_3D.info'
print(file)
tmp = joblib.load(file)
try:
    image = np.array(tmp['value'].x)
except Exception:
    # Some files store the raw array directly rather than an object with .x.
    image = tmp
../data/experiment_03/V11_T1_Right(Top)_500X_3D.info
In [39]:
%matplotlib notebook
# create the x and y coordinate arrays (here we just use pixel indices)
from additive.experimental import plot_3d_surface
In [40]:
from additive.preprocessing import process_image
tilter = process_image(image, degree=2)
[ 1.41432612e-02 -5.71556934e-04 -6.06154037e-07 -6.07045537e-09]
In [41]:
tilter.mean(), image.mean()
Out[41]:
(147.82883315943, 158.0686)
In [42]:
fig = plt.figure(figsize=(8, 8))
ax = fig.add_subplot(111, projection='3d')
plot_3d_surface(tilter, step=70, ax=ax, cmap='jet')
plot_3d_surface(image+500, step=70, ax=ax, cmap='jet')
Out[42]:
<mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x7f015d9b5ac8>
In [43]:
# plt.savefig(f"/home/ben_rasoolov/additive_data/figures/3d_tilted_vs_original_{f}.png", dpi=300)
In [44]:
%matplotlib inline
font = {'size'   : 15}
matplotlib.rc('font', **font)
fig, axes = plt.subplots(figsize=(15, 3))
axis=1
t1 = image[500:-500, 500:-500]
t2 = tilter[500:-500, 500:-500]
t1 = t1 - t1.mean()
t2 = t2 - t2.mean()
plt.plot(np.arange(t1.shape[1-axis]), t1.mean(axis=axis), label="Original", linewidth=3)
plt.plot(np.arange(t2.shape[1-axis]), t2.mean(axis=axis), label="Tilted & aligned", linewidth=3)
axes.legend()
d, f, e = dfe(file)
plt.savefig(f"../data/figures/side_view_tilted_vs_original_{f}.png", dpi=300)
In [ ]:
import tensorflow as tf
In [ ]:
%load_ext autoreload
%autoreload 2
import numpy as np
import joblib
import glob
files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*info")
file_name = np.random.choice(files)
print(file_name)
data = joblib.load(file_name)
image = np.array(data['value'].x)
In [ ]:
from additive.preprocessing import *
In [ ]:
thresh = image.mean()-image.std()
get_image_alignment_slope(image[:, 300:-300], thresh)#, image.mean()-image.std())
In [ ]:
rotated_image = correct_aligment(image, 300)
In [ ]:
get_image_alignment_slope(rotated_image[:, 300:-300], 0)
In [ ]:
import matplotlib.pyplot as plt
plt.imshow(rotated_image)
In [ ]:
from additive.preprocessing import adjust_tilt
In [ ]:
tilted_rotated_image = adjust_tilt(rotated_image[1000:-1000, 300:-300], 8)
In [ ]:
tilted_rotated_image.max()
In [ ]:
plt.imshow(tilted_rotated_image)
In [ ]:
plt.imshow(rotated_image[1000:-1000, 300:-300])

Experiment 3: Getting global stats

In [1]:
%load_ext autoreload
%autoreload 2
%reset -f
import numpy as np
import joblib
import glob
from imports import *
from additive.feature_functions import feature_functions_functions as feature_funs
from additive.features import Features
from additive.preprocessing import load_and_process_image
In [2]:
def isin(values, text, ignore_case=False):
    """Return True if any of `values` occurs as a substring of `text`."""
    if ignore_case:
        return any(v.lower() in text.lower() for v in values)
    return any(v in text for v in values)
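A quick check of the helper's semantics, using filenames in the pattern above:
In [ ]:
isin(['v18', 'v12'], 'V18_T1_Right(Top)_500X_3D.info', ignore_case=True)   # True: 'v18' matches
isin(['v18', 'v12'], 'V17_T1_Right(Top)_500X_3D.info', ignore_case=True)   # False: no needle occurs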
In [3]:
################### ################### ################### ###################
# version 1: all files
#files = glob.glob("/home/ben_rasoolov/additive_data/experiment_03/*info")
#chosen_files = [f for f in files if isin(['v18', 'v12', 'v14', 'v13', 'v11', 'v17'], f, ignore_case=True)]
# version 2: v19 repeated measurements
################### ################### ################### ###################
info_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03//*_2.info")
chosen_files = info_files + [x.replace("_2", "") for x in info_files]
len(chosen_files)
Out[3]:
8
In [122]:
def get_features(img, features=None):
    # Apply every registered feature function; optionally restrict to a subset.
    if features is None:
        return {feature: fun(img) for feature, fun in feature_funs.items()}
    return {feature: fun(img) for feature, fun in feature_funs.items() if feature in features}
In [5]:
from multiprocessing import Pool
import dask
def get_features_from_files(chosen_files, n_procs=8):
    images = bag.from_sequence(chosen_files)
    preprocessed_images = images.map(load_and_process_image,
                                 transform_fun=lambda data: np.array(data['value'].x), crop_size=(500, 300))
    features = preprocessed_images.map(get_features)
    with Pool(min(len(chosen_files), n_procs)) as p:
        with dask.config.set(pool=p):
            out = features.compute()
    return out
In [6]:
out = get_features_from_files(chosen_files)
[ 3.07227852e-03 -2.76017534e-03  3.11530051e-10  2.58426966e-07]
[ 5.31283997e-03 -7.50884270e-04 -5.12736799e-07  5.28862862e-09]
[ 7.28704370e-03  5.96422919e-04 -1.08035714e-06 -2.71210044e-07]
[ 3.18884910e-03 -1.12411201e-03 -5.38034798e-07  1.39545450e-07]
[ 7.37593597e-03  6.27058679e-04 -4.08412231e-07  1.56355537e-07]
[ 1.29852664e-02  1.02466631e-04 -1.86451899e-06  6.28399364e-08]
[ 7.67650896e-03 -1.30926715e-04 -1.18232699e-06  3.14384162e-07]
[ 9.88180838e-03  1.11280179e-03 -7.32214807e-07  8.97498073e-08]
In [7]:
import pandas as pd
from additive.utility import get_file_info
df = pd.DataFrame(dict(zip(chosen_files, out))).T
df.index = df.index.map(lambda x: x.split("/")[-1].split(".")[0]).rename("file")
df = df.reset_index()
df = get_file_info(df['file']).join(df)
df
Out[7]:
ispolished specimen T RL file ra_1d rq_1d rsk_1d rku_1d rp_1d rv_1d ra_2d rq_2d rp_2d rv_2d rsk_2d rku_2d
0 False V19 T1 R V19_T1_Right(Top)_500X_3D_2 17.153707 21.003688 0.285355 2.630660 58.136482 47.280207 17.318466 21.243959 90.559808 63.503547 0.296151 2.662597
1 False V19 T2 R V19_T2_Right(Top)_500X_3D_2 21.073246 25.881079 0.176053 2.650911 70.556634 61.533729 21.329844 26.250163 115.246064 74.088965 0.205515 2.691686
2 False V19 T1 L V19_T1_Left(Bottom)_500X_3D_2 17.262126 21.181428 0.292029 2.683889 61.468178 46.909103 17.386746 21.412423 108.171758 61.679967 0.332384 2.789340
3 False V19 T2 L V19_T2_Left(Bottom)_500X_3D_2 20.947103 25.736105 0.134779 2.720703 68.994370 63.440827 21.201433 26.128029 126.151415 220.645633 0.165582 2.831415
4 False V19 T1 R V19_T1_Right(Top)_500X_3D 17.743369 21.699239 0.374067 2.679157 63.137758 46.921243 17.894806 21.931783 96.008826 63.881027 0.388786 2.710847
5 False V19 T2 R V19_T2_Right(Top)_500X_3D 22.013782 26.931811 0.210536 2.635093 74.340308 61.690260 22.529411 27.570966 119.371693 88.857183 0.246334 2.653679
6 False V19 T1 L V19_T1_Left(Bottom)_500X_3D 17.394297 21.292760 0.344018 2.704562 63.269488 46.717130 17.539195 21.547308 100.548958 85.585189 0.381685 2.815992
7 False V19 T2 L V19_T2_Left(Bottom)_500X_3D 21.643182 26.475391 0.186703 2.633285 72.243692 61.522641 22.018653 27.023870 118.424690 79.835154 0.236882 2.741941
In [8]:
# Paired differences: rows 0-3 are the "_2" repeat measurements, rows 4-7 the
# originals (same specimen order); columns 5 onward are the numeric measures.
res = df.iloc[:4, 5:].values - df.iloc[4:, 5:].values
In [12]:
from scipy.stats import t
t_value = t.ppf(.95, len(res)-1)
# std/2 is the standard error std/sqrt(n), since n = len(res) = 4 here.
result = ((res.mean(axis=0) - res.std(axis=0)/2*t_value)>0) | ((res.mean(axis=0) + res.std(axis=0)/2*t_value)<0)
In [18]:
p1 = t.cdf(res.mean(axis=0) - res.std(axis=0)/2*t_value, len(res)-1)
p2 = 1 - t.cdf(res.mean(axis=0) + res.std(axis=0)/2*t_value, len(res)-1)
pvalue = np.minimum(p1, p2)
pvalue
Out[18]:
array([2.09480165e-01, 1.85694937e-01, 4.70623194e-01, 4.74862735e-01,
       8.55409976e-03, 1.13572210e-01, 1.69687063e-01, 1.48046941e-01,
       1.54765531e-03, 9.67695177e-07, 4.67990495e-01, 4.71718225e-01])
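For clarity, the interval above is the usual mean ± t·SE construction; an equivalent, more explicit sketch using the same variables:
In [ ]:
n = len(res)                               # 4 paired differences
se = res.std(axis=0) / np.sqrt(n)          # standard error; sqrt(4) == 2 matches the /2 above
lower = res.mean(axis=0) - t_value * se
upper = res.mean(axis=0) + t_value * se
reject_null = (lower > 0) | (upper < 0)    # reject when the interval excludes zero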
In [30]:
pd.concat([pd.Series(result.reshape(-1), name='reject_null'), 
           pd.Series(pvalue.reshape(-1), name='pvalue').round(decimals=2)], axis=1)
Out[30]:
reject_null pvalue
0 True 0.21
1 True 0.19
2 True 0.47
3 False 0.47
4 True 0.01
5 False 0.11
6 True 0.17
7 True 0.15
8 False 0.00
9 False 0.00
10 True 0.47
11 False 0.47
In [ ]:
#df.to_csv("/home/ben_rasoolov/additive_data/paper/global_stats_tilted_rotated_cropped_v02.csv")

Statistical tests

In [2]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [3]:
#client = Client("tcp://10.142.0.26:8786")
#client = Client(scheduler_file="/home/ben_rasoolov/sched")
#client.restart()
In [5]:
df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v02.csv")
res = df.groupby(['specimen', 'ispolished']).agg(['mean'])
In [6]:
tmp = df.sort_values(['ispolished', 'specimen', 'T', 'RL'])
In [7]:
r = tmp[tmp['RL'] == 'R']
l = tmp[tmp['RL'] == 'L']
rl = r.merge(l, on=['ispolished', 'specimen', 'T'], suffixes=["_r", "_l"])
In [8]:
cols = ['ra_1d', 'rq_1d',
       'rsk_1d', 'rku_1d', 'rp_1d', 'rv_1d', 'ra_2d', 'rq_2d',
       'rp_2d', 'rv_2d', 'rsk_2d', 'rku_2d']
for col in cols:
    # Confidence interval for the mean right-left difference: m is the standard
    # error and 1.75 approximates the t critical value; an interval that
    # excludes zero indicates a significant right/left difference.
    m = (rl[col+"_r"] - rl[col+"_l"]).std()/np.sqrt(len(rl))
    t1 = (rl[col+"_r"] - rl[col+"_l"]).mean() - 1.75 * m
    t2 = (rl[col+"_r"] - rl[col+"_l"]).mean() + 1.75 * m
    print(col, t1, t2)
ra_1d -0.29316302015631557 1.036193849802721
rq_1d -0.17777777798646016 1.177307755441356
rsk_1d -0.12494176580932576 0.06311393476027599
rku_1d -0.3254137505947224 0.36110903169177916
rp_1d -0.8437729792916301 1.975157118373886
rv_1d 0.5906327272247072 2.5087913326933213
ra_2d -0.2735570707720188 1.0416534191733668
rq_2d -0.1574477248722843 1.1776586877126682
rp_2d -11.039696243514546 2.394457756979916
rv_2d -0.9669497488438359 5.966071751897761
rsk_2d -0.12104258856503139 0.048531157511327944
rku_2d -0.26266747616250447 0.34506696662629094
In [9]:
df
Out[9]:
ispolished specimen T RL file ra_1d rq_1d rsk_1d rku_1d rp_1d rv_1d ra_2d rq_2d rp_2d rv_2d rsk_2d rku_2d
0 False V17 T2 R V17_T2_Right(Top)_500X_3D 20.367282 25.141885 -0.043377 2.692481 66.080730 66.391047 20.512985 25.420736 97.474517 94.288419 0.000569 2.781080
1 False V13 T1 L V13_T1_Left(Bottom)_500X_3D 21.104536 26.193603 0.032069 2.879431 70.324724 74.886901 21.314308 26.541568 103.122105 99.855399 0.046101 2.941080
2 True V11 T1 L Polished_V11_T1_Left(Bottom)_500X_3D 4.527431 6.208072 -2.083207 7.491663 5.929927 28.451027 4.862916 6.593996 29.729452 46.743327 -1.759887 6.331805
3 False V11 T2 R V11_T2_Right(Top)_500X_3D 19.051853 23.286449 0.069212 2.556587 60.847256 55.875495 19.321375 23.652111 85.683018 73.513093 0.091341 2.591286
4 False V12 T1 R V12_T1_Right(Top)_500X_3D 19.643611 24.341770 -0.136598 2.906672 61.436551 70.503110 19.969894 24.865100 110.516329 121.344634 -0.162652 3.100259
5 False V17 T2 L V17_T2_Left(Bottom)_500X_3D 17.995050 22.312795 -0.019567 2.769843 58.561165 61.415199 18.154397 22.612336 84.475816 81.658029 0.023732 2.835941
6 False V14 T2 L V14_T2_Left(Bottom)_500X_3D 17.208462 21.524176 0.578931 3.525852 74.900303 45.582311 17.327398 21.812648 149.435735 59.269789 0.694874 4.153309
7 False V13 T2 R V13_T2_Right(Top)_500X_3D 9.673651 12.161879 -1.451079 4.620403 11.913197 45.538838 9.861045 12.521731 19.472986 63.180704 -1.351469 4.410962
8 False V18 T2 L V18_T2_Left(Bottom)_500X_3D 24.144756 29.346941 0.017985 2.483493 74.027413 71.203955 24.466728 29.806544 113.083384 89.575979 0.055424 2.515604
9 True V11 T1 R Polished_V11_T1_Right(Top)_500X_3D 6.192697 7.930921 -1.656373 5.331377 7.676874 31.480531 6.428117 8.244573 14.717631 51.072400 -1.432599 4.665817
10 False V13 T1 R V13_T1_Right(Top)_500X_3D 21.668242 26.974085 0.103231 2.875700 74.689298 74.596831 21.888691 27.385602 112.350512 106.843780 0.140409 2.987951
11 False V14 T1 L V14_T1_Left(Bottom)_500X_3D 16.441025 20.102957 0.211128 2.639909 57.559230 46.490097 16.544520 20.257070 101.963819 58.582385 0.228543 2.687087
12 False V18 T2 R V18_T2_Right(Top)_500X_3D 26.158866 31.653838 -0.053646 2.407116 75.228121 78.436840 26.485163 32.025681 109.199964 96.345796 -0.047313 2.397637
13 False V11 T2 L V11_T2_Left(Bottom)_500X_3D 18.695940 22.990538 0.059173 2.640564 62.974622 55.797110 18.830656 23.226673 113.395424 77.404308 0.079364 2.711695
14 True V17 T1 L Polished_V17_T1_Left(Bottom)_500X_3D 10.498238 12.427238 -0.212123 2.207155 21.022304 33.709958 10.569368 12.533908 29.516371 47.834022 -0.202740 2.209217
15 True V11 T2 R Polished_V11_T2_Right(Top)_500X_3D 7.913196 9.857359 -1.438924 4.422782 10.449421 35.860442 8.072428 10.150749 39.829623 48.653958 -1.300808 4.073087
16 True V11 T2 L Polished_V11_T2_Left(Bottom)_500X_3D 7.958603 9.815106 -1.368657 4.341406 10.520284 36.089186 8.070356 10.064526 15.929695 67.529944 -1.225809 3.928057
17 False V17 T1 R V17_T1_Right(Top)_500X_3D 16.407634 20.874124 -0.021723 3.171323 61.134055 58.394124 16.535235 21.086493 107.004222 78.228277 0.007915 3.291560
18 True V13 T1 R Polished_V13_T1_Right(Top)_500X_3D 12.156369 14.046193 -0.751848 2.459677 16.153497 39.609140 12.315946 14.300910 20.368888 66.351780 -0.710786 2.394007
19 False V11 T1 L V11_T1_Left(Bottom)_500X_3D 19.792682 24.456594 0.231876 2.865466 72.894686 56.415843 20.030997 24.881321 126.454422 72.300355 0.319020 3.117125
20 False V11 T1 R V11_T1_Right(Top)_500X_3D 20.674016 25.587523 0.351087 2.876343 75.648138 55.552691 20.813035 25.863454 125.943341 73.534159 0.424204 3.153140
21 False V12 T2 R V12_T2_Right(Top)_500X_3D 19.553369 24.280886 -0.170903 2.970940 60.345250 74.236954 19.744568 24.620367 91.394056 113.097155 -0.199378 3.106228
22 False V14 T1 R V14_T1_Right(Top)_500X_3D 16.278273 19.907712 0.176003 2.645924 57.220145 46.602017 16.414690 20.103743 106.020119 62.588979 0.201426 2.714299
23 False V12 T1 L V12_T1_Left(Bottom)_500X_3D 19.010940 23.534222 -0.093366 2.802292 60.258749 67.526613 19.517084 24.187682 128.327797 109.846882 -0.113183 2.818710
24 True V17 T2 L Polished_V17_T2_Left(Bottom)_500X_3D 11.992086 14.335027 -0.648876 2.727019 20.433179 43.701991 12.116263 14.611104 77.231297 61.602650 -0.598584 2.757794
25 False V17 T1 L V17_T1_Left(Bottom)_500X_3D 16.047365 20.188340 -0.093679 2.992685 56.615596 55.442637 16.119475 20.347713 111.908920 71.410388 -0.064032 3.124954
26 False V18 T1 R V18_T1_Right(Top)_500X_3D 23.160717 28.192667 0.099168 2.524672 75.252877 65.618219 23.541102 28.707470 115.714656 88.372131 0.135216 2.584600
27 False V12 T2 L V12_T2_Left(Bottom)_500X_3D 19.311123 23.887852 -0.167967 2.890995 58.988632 72.426631 19.572037 24.286837 84.783775 108.908407 -0.171732 2.980683
28 True V17 T1 R Polished_V17_T1_Right(Top)_500X_3D 11.030199 12.974124 -0.353556 2.234523 19.664089 34.625986 11.164770 13.135659 22.556317 48.780352 -0.345022 2.208558
29 True V13 T2 L Polished_V13_T2_Left(Bottom)_500X_3D 13.283742 15.626065 -0.923787 2.826922 17.205495 46.826339 13.412949 15.919650 21.471980 72.756115 -0.868210 2.735324
30 True V13 T1 L Polished_V13_T1_Left(Bottom)_500X_3D 10.849642 12.911247 -1.011653 3.079410 13.918803 39.674545 10.999738 13.217730 21.130112 57.363571 -0.957459 2.995107
31 False V13 T2 L V13_T2_Left(Bottom)_500X_3D 13.283742 15.626065 -0.923787 2.826922 17.205495 46.826339 13.412949 15.919650 21.471980 72.756115 -0.868210 2.735324
32 True V13 T2 R Polished_V13_T2_Right(Top)_500X_3D 9.673651 12.161879 -1.451079 4.620403 11.913197 45.538838 9.861045 12.521731 19.472986 63.180704 -1.351469 4.410962
33 True V17 T2 R Polished_V17_T2_Right(Top)_500X_3D 13.642760 16.134591 -0.527518 2.425814 22.267575 45.397845 13.802137 16.466252 27.942098 62.059206 -0.483563 2.458828
34 False V14 T2 R V14_T2_Right(Top)_500X_3D 17.548699 21.797333 0.443097 3.176959 70.241797 47.857071 17.667926 22.041712 155.159576 71.950326 0.538243 3.679699
35 False V18 T1 L V18_T1_Left(Bottom)_500X_3D 21.962441 26.822609 0.157134 2.607411 74.639006 61.754518 22.165148 27.111219 125.195902 82.996088 0.189654 2.689549

Extracting features

In [93]:
%reset -f
%load_ext autoreload
%autoreload 2
from additive.features import Features
from additive.utility import dfe
from imports import *
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [102]:
root_dir = "/home/ben_rasoolov/additive_project/data/"
def get_undone_files(big_file_dir, info_file_dir):
    # Compare filename stems: any .pd image that has no matching .info result
    # still needs processing.
    root_dir = dfe(big_file_dir)[0]
    big_files  = glob.glob(big_file_dir)
    info_files = glob.glob(info_file_dir)
    undone_files = [f"{root_dir}/{x}.pd"
                    for x in set(dfe(x)[1] for x in big_files) - set(dfe(x)[1] for x in info_files)]
    return undone_files
In [103]:
undone_files = get_undone_files(root_dir+"original_images/*pd", root_dir+"experiment_03/*info")
undone_files
Out[103]:
['/home/ben_rasoolov/additive_project/data/original_images//V19_T2_Left(Bottom)_500X_3D_2.pd',
 '/home/ben_rasoolov/additive_project/data/original_images//V19_T1_Left(Bottom)_500X_3D_2.pd',
 '/home/ben_rasoolov/additive_project/data/original_images//V19_T2_Right(Top)_500X_3D_2.pd',
 '/home/ben_rasoolov/additive_project/data/original_images//V19_T1_Right(Top)_500X_3D_2.pd']
In [107]:
from additive.preprocessing import gkern2d
from scipy.ndimage import zoom, convolve
def get_all_stats(files, n_processes=4):
    s = np.array([2.330435, 2.33016])
    M = 31
    _ = gkern2d(M, 5)
    k3 = _ / _.sum()
    original_images = bag.from_sequence(files).map(joblib.load)
    resized_images   = original_images.map(zoom, zoom=1/s)
    smoothed_images = resized_images.map(convolve, weights=k3)
    c = smoothed_images.map(Features).map(lambda x: x.run_all_tests())
    from multiprocessing import Pool
    with dask.config.set(pool=Pool(min(n_processes, len(files)))):
        all_stats = c.compute()
    return all_stats
In [108]:
all_stats = get_all_stats(undone_files)
[*] Kernel sizes extracted
[*] Local minima extracted
[*] Local maxima extracted
[*] Starting to run all tests.
[*] Starting to get global properties
[... the same startup messages repeat for each of the four workers ...]
[*] Finished extracting circles from profile     0/ 6671
/home/ben_rasoolov/miniconda3/envs/additive/lib/python3.6/site-packages/scipy/optimize/minpack.py:454: RuntimeWarning: Number of calls to function has reached maxfev = 800.
  warnings.warn(errors[info][0], RuntimeWarning)
[... progress lines from the four workers, every 500 profiles up to 6500/6675, elided ...]
[*] Starting to get global properties
/home/ben_rasoolov/additive_project/additive/features.py:84: UserWarning: Warning: converting a masked element to nan.
  np.array([np.mean(np.sort(self.x[n][self.local_minima[n]])[-10:]) for n in range(len(self.x))])
[... the same warning repeats for each of the four workers ...]
Process ForkPoolWorker-31:
Traceback (most recent call last):
  File "/home/ben_rasoolov/miniconda3/envs/additive/lib/python3.6/multiprocessing/process.py", line 258, in _bootstrap
    self.run()
  File "/home/ben_rasoolov/miniconda3/envs/additive/lib/python3.6/multiprocessing/pool.py", line 108, in worker
    task = get()
KeyboardInterrupt
[... equivalent KeyboardInterrupt tracebacks from ForkPoolWorker-32, -33, and -34 elided ...]
In [112]:
for name, value in zip(undone_files, all_stats):
    dic = {'name': name, 'variation': 'Normal', 'value': value}
    d, f, e = dfe(name)
    path = root_dir+"experiment_03/"+f+".info"
    print(path)
    # Never overwrite an existing result file.
    if os.path.exists(path):
        print(f"path {path} exists")
        continue
    joblib.dump(dic, path)
/home/ben_rasoolov/additive_project/data/experiment_03/V19_T2_Left(Bottom)_500X_3D_2.info
/home/ben_rasoolov/additive_project/data/experiment_03/V19_T1_Left(Bottom)_500X_3D_2.info
/home/ben_rasoolov/additive_project/data/experiment_03/V19_T2_Right(Top)_500X_3D_2.info
/home/ben_rasoolov/additive_project/data/experiment_03/V19_T1_Right(Top)_500X_3D_2.info

Getting stats for different variations

In [1]:
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
from additive.features import Features
root = "/home/ben_rasoolov/additive_project/data/dataset"
file_names = set(glob.glob(f'{root}/*_3d.features'))  - {f'{root}/v06_T2_L_3d.features'}
len(file_names)
Out[1]:
22
In [2]:
images_mapper = [
      'Original',          # lambda x: x,
      '25% area',          # lambda x: random_sub_image(x, .5),
      '6.25% area',        # lambda x: random_sub_image(x, .25),

      '50% length',        # lambda x: random_sub_length_image(x, .5),
      '25% length',        # lambda x: random_sub_length_image(x, .25),
#     '50% length rotate', # lambda x: align_image(random_sub_length_image(x)),
#     '50% length tilt',   # lambda x: adjust_tilt(random_sub_length_image(x)),
      'Tilted',            # lambda x: adjust_tilt(x),
      'Tilted & Rotated',  # lambda x: adjust_tilt(align_image(x)),
      '50% width',         # lambda x: random_sub_width_image(x),
      '25% width',         # lambda x: random_sub_width_image(x, .25),
#     '50% width tilt',    # lambda x: adjust_tilt(random_sub_width_image(x)),
#     '50% random',        # lambda x: random_sub_profile_image(x, .50),
]
In [4]:
# indices select: Original, 25% area, 6.25% area, 50% length, 25% length,
# 50% width, 25% width (positions in images_mapper above).
indices = [0, 1, 2, 3, 4, 7, 8]
def get_all_stats(file_name):
    res = joblib.load(file_name)
    print(file_name)
    tmp = pd.concat([res[i].statistics.assign(Variation=images_mapper[i]) for i in indices])
    return tmp.assign(file=file_name.split("/")[-1])
with Pool(8) as p:
    out = p.map(get_all_stats, file_names)
/home/ben_rasoolov/additive_project/data/dataset/v04_T2_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v05_T1_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v03_T2_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v02_T1_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v06_T1_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v05_T1_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v05_T2_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v01_T1_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v02_T1_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v06_T1_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v01_T2_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v04_T2_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v02_T2_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v02_T2_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v04_T1_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v04_T1_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v01_T1_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v05_T2_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v03_T2_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v03_T1_L_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v01_T2_R_3d.features
/home/ben_rasoolov/additive_project/data/dataset/v03_T1_R_3d.features
In [6]:
stats = pd.concat(out)
In [7]:
stats.to_csv("/home/ben_rasoolov/additive_project/data/paper/stats.csv", index=False)
In [28]:
cond = stats['Variation'] != '6.25% area'
data = stats[cond].groupby(['file', 'Variation']).mean().reset_index()
data = data.set_index(['Variation', 'file']).stack().rename(r'Statistic Value($\mu m$)').reset_index()\
    .rename(columns={"level_2": "Measure"})
In [29]:
sns.set_palette("deep")
ax = sns.catplot(data=data, kind='bar', height=8.27, aspect=11.7/8.27,
                 x='Measure', y=r'Statistic Value($\mu m$)', hue='Variation')
#savefig('global_measure_comparison.svg')
In [1]:
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
from additive.feature_functions import feature_functions_functions as feature_functions
In [2]:
files = ['/home/ben_rasoolov/additive_project/data/experiment_03/V17_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V11_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V14_T2_Right(Top)_500X_3D.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info']
In [28]:
variations = []

def var_fun(fun):
    # Decorator: register each variation as a (name, function) pair.
    variations.append((fun.__name__, fun))
    return fun

@var_fun
def original(img):
    return img

@var_fun
def half_area_center(img):
    # Center crop spanning half of each axis (i.e. a quarter of the area).
    w, h = img.shape
    cw, ch = w//2, h//2
    return img[cw-w//4:cw+w//4, ch-h//4:ch+h//4]

@var_fun
def half_length_center(img):
    # All profiles, center half of each profile's length.
    w, h = img.shape
    cw, ch = w//2, h//2
    return img[:, ch-h//4:ch+h//4]

@var_fun
def half_width_center(img):
    # Center half of the profiles, full length.
    w, h = img.shape
    cw, ch = w//2, h//2
    return img[cw-w//4:cw+w//4, :]

@var_fun
def random_profiles(img, ratio=.5):
    # Keep a random subset of the profiles (rows).
    return img[np.random.rand(len(img)) < ratio]

@var_fun
def half_length_left(img, ratio=.5):
    w, h = img.shape
    return img[:, :h//2]

@var_fun
def half_length_right(img, ratio=.5):
    w, h = img.shape
    return img[:, h//2:]

@var_fun
def half_width_top(img, ratio=.5):
    w, h = img.shape
    return img[:w//2, :]

@var_fun
def half_width_bottom(img, ratio=.5):
    w, h = img.shape
    return img[w//2:, :]
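As a quick check, the decorator registers the variations in definition order:
In [ ]:
[name for name, _ in variations]
# ['original', 'half_area_center', 'half_length_center', 'half_width_center',
#  'random_profiles', 'half_length_left', 'half_length_right', 'half_width_top',
#  'half_width_bottom']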
In [29]:
my_funs = [(k, v) for k, v in feature_functions.items() if '1d' in k]
In [35]:
def prepare_image(data):
    image = np.array(data['value'].x)[500:-500, 500:-500]
    out = {}
    for variation, var_fun in variations:
        for feature, feature_fun in my_funs:
            # var_fun is re-applied per feature, so random variations draw a
            # fresh subset for every feature.
            x = var_fun(image)
            print(x.shape)
            out[variation, feature] = feature_fun(x)
    return out
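prepare_image is never invoked in the cells above; a minimal driver sketch following the Pool pattern used elsewhere in this notebook (load_and_prepare is a hypothetical helper):
In [ ]:
# Hypothetical driver: compute the per-variation 1-D features for every file.
def load_and_prepare(file):
    return prepare_image(joblib.load(file))

with Pool(8) as p:
    variation_features = p.map(load_and_prepare, files)
variation_df = pd.DataFrame(variation_features)  # columns keyed by (variation, feature)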

Download files

In [18]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
from additive.utility import dfe
from sh import wget
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [19]:
links = {
    "V19_T1_Left(Bottom)_500X_3D_2.csv": "https://auburn.box.com/shared/static/0iy6dopmmq8c8owpkjc1rddn48czeam1.csv",
    "V19_T1_Right(Top)_500X_3D_2.csv": "https://auburn.box.com/shared/static/i39htg5ku0wxphc9l6yebe63g7qatsd9.csv",
    "V19_T2_Left(Bottom)_500X_3D_2.csv": "https://auburn.box.com/shared/static/856x1lsjqr0ldl7dlesv1rsyzdw84b3x.csv",
    "V19_T2_Right(Top)_500X_3D_2.csv": "https://auburn.box.com/shared/static/6kykrfjuwiounoduupk4yebuq2ah4k80.csv"
}
In [22]:
#for name, link in links.items():
#    wget(link, "-O", "/home/ben_rasoolov/additive_project/data/original_images/"+name)
In [23]:
csv_files = glob.glob("/home/ben_rasoolov/additive_project/data/original_images/*csv")
csv_files
Out[23]:
['/home/ben_rasoolov/additive_project/data/original_images/V19_T1_Left(Bottom)_500X_3D_2.csv',
 '/home/ben_rasoolov/additive_project/data/original_images/V19_T2_Left(Bottom)_500X_3D_2.csv',
 '/home/ben_rasoolov/additive_project/data/original_images/V19_T2_Right(Top)_500X_3D_2.csv',
 '/home/ben_rasoolov/additive_project/data/original_images/V19_T1_Right(Top)_500X_3D_2.csv']
In [27]:
def process_csv_files(file):
    print(file)
    d, f, e = dfe(file)
    x = pd.read_csv(file, header=None)
    # float32 halves the on-disk footprint relative to float64.
    joblib.dump(x.values.astype('float32'), d+f+".pd")
In [28]:
with Pool(4) as p:
    p.map(process_csv_files, csv_files)
/home/ben_rasoolov/additive_project/data/original_images/V19_T2_Left(Bottom)_500X_3D_2.csv
/home/ben_rasoolov/additive_project/data/original_images/V19_T1_Right(Top)_500X_3D_2.csv
/home/ben_rasoolov/additive_project/data/original_images/V19_T1_Left(Bottom)_500X_3D_2.csv
/home/ben_rasoolov/additive_project/data/original_images/V19_T2_Right(Top)_500X_3D_2.csv

Experiment: Comparing repeated measures

In [86]:
%load_ext autoreload
%autoreload 2
%reset -f
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [87]:
from imports import *
from additive.utility import dfe, get_file_info
from sh import wget
from numpy import genfromtxt
In [88]:
info_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03//*_2.info")
info_files = info_files + [x.replace("_2", "") for x in info_files]
In [90]:
def get_stats_from_file(data):
    if isinstance(data, str):
        data = joblib.load(data)['value']
    return data.statistics, data.circle_statistics#.describe()
In [91]:
with Pool(8) as p:
    all_stats_ = p.map(get_stats_from_file, info_files)
In [95]:
global_stats_ = [x for x, y in all_stats_]
circle_stats_ = [y for x, y in all_stats_]
In [96]:
global_stats = pd.concat([x.assign(file=dfe(f)[1]) for x, f in zip(global_stats_, info_files)])\
    .reset_index()
# Files ending in "_2" are the second measurement of the same specimen face.
global_stats['repetition'] = global_stats['file'].str.contains("_2") * 1 + 1
global_stats['file'] = global_stats['file'].str.replace("_2", '')
global_stats
Out[96]:
index rv ra rz rq r10_iso file repetition
0 0 180.668280 28.297204 18.416615 40.537096 NaN V19_T1_Right(Top)_500X_3D 2
1 1 179.286918 29.452539 19.470803 41.059895 NaN V19_T1_Right(Top)_500X_3D 2
2 2 176.691939 31.504457 21.499101 42.336516 NaN V19_T1_Right(Top)_500X_3D 2
3 3 173.161809 34.531464 24.366313 44.567935 NaN V19_T1_Right(Top)_500X_3D 2
4 4 169.000613 38.458735 27.918929 47.696514 NaN V19_T1_Right(Top)_500X_3D 2
... ... ... ... ... ... ... ... ...
55684 7245 87.966996 16.938349 18.855455 22.201247 60.226305 V19_T2_Left(Bottom)_500X_3D 1
55685 7246 75.403560 13.912015 14.814021 18.700508 49.211100 V19_T2_Left(Bottom)_500X_3D 1
55686 7247 62.561338 11.055807 11.367236 15.344890 39.316375 V19_T2_Left(Bottom)_500X_3D 1
55687 7248 51.975379 8.899100 8.829132 12.696007 31.425529 V19_T2_Left(Bottom)_500X_3D 1
55688 7249 45.982700 7.752109 7.481262 11.238276 27.938360 V19_T2_Left(Bottom)_500X_3D 1

55689 rows × 8 columns

In [107]:
%matplotlib inline
fig, ax = plt.subplots(figsize=(15, 8))
#tt = t.stack().reset_index().rename(columns={'level_2': 'measure', 0: 'value'})
sns.barplot(data=global_stats, x='file', y='ra', hue='repetition')
Out[107]:
<matplotlib.axes._subplots.AxesSubplot at 0x7f97a2c701d0>
In [106]:
from scipy.stats import ttest_ind
# Print the file/measure pairs where the two repeated measurements are NOT
# significantly different (p > .05), i.e. where the measurement is stable.
for col in ['rho', 'ra', 'rv', 'rz', 'rq']:
    if col not in global_stats.columns:
        continue
    for file in np.unique(global_stats['file']):
        cond1 = (global_stats['repetition'] == 1) & (global_stats['file'] == file)
        cond2 = (global_stats['repetition'] == 2) & (global_stats['file'] == file)
        ra = global_stats[col]
        pvalue = ttest_ind(ra[cond1], ra[cond2]).pvalue
        if pvalue > .05:
            print(file, col, pvalue)
V19_T2_Left(Bottom)_500X_3D ra 0.15426378822213835
V19_T2_Left(Bottom)_500X_3D rv 0.15046199388792791
V19_T2_Left(Bottom)_500X_3D rq 0.056542146837501715
In [32]:
# The per-axis resampling factors used in get_all_stats above.
s = np.array([2.330435, 2.33016])
1/s
Out[32]:
array([0.42910444, 0.42915508])

Polished vs unpolished surface analysis with equalizeHist

Todo:

  • align and tilt before doing histogram equalization and see if the results match (see the sketch below)
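A minimal sketch of that ordering, reusing correct_aligment and adjust_tilt (with the crop margins and parameters from the alignment cells earlier) plus the min_max_scale helper defined below; untested:
In [ ]:
# Sketch only: align and tilt first, then equalize.
from additive.preprocessing import correct_aligment, adjust_tilt
import cv2
def equalize_after_alignment(image):
    aligned = correct_aligment(image, 300)                   # straighten edge slope
    tilted = adjust_tilt(aligned[1000:-1000, 300:-300], 8)   # remove global tilt
    scaled = min_max_scale(tilted, 0, 255).astype('uint8')   # rescale to 8-bit range
    return cv2.equalizeHist(scaled)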
In [1]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
In [2]:
_ = [x.split('\t') for x in """Surface Condition	specimen	Frequency (Hz)	Strain Amplitude (mm/mm)	Fatigue Life (cycles)	Reversal to failure (2Nf)
As-built	V02	5	0.004	45859	91718
As-built	V10	5	0.004	49177	98354
As-built	V04	7.5	0.003	91222	182444
As-built	V16	7.5	0.003	110013	220026
As-built	V08	7.5	0.003	136464	272928
As-built	V12	8	0.0025	192404	384808
As-built	V18	8	0.0025	259128	518256
As-built	V14	8	0.0025	320856	641712
As-built	V06	10	0.002	5000000	10000000
Half-polished	V07	5	0.004	50916	101832
Half-polished	V09	5	0.004	60992	121984
Half-polished	V15	7.5	0.003	132668	265336
Half-polished	V05	7.5	0.003	153540	307080
Half-polished	V03	7.5	0.003	163123	326246
Half-polished	V13	8	0.0025	287061	574122
Half-polished	V11	8	0.0025	291206	582412
Half-polished	V17	8	0.0025	395801	791602
Half-polished	V01	10	0.002	5000000	10000000""".split('\n')]
fatigue = pd.DataFrame(_[1:], columns=_[0])
#fatigue['Specimen ID'] = fatigue['Specimen ID']#.str.lower()
for c in ['Frequency (Hz)', 'Strain Amplitude (mm/mm)', 'Fatigue Life (cycles)', 'Reversal to failure (2Nf)']:
    fatigue[c] = pd.to_numeric(fatigue[c])
fatigue['ispolished'] = fatigue['Surface Condition'].str.contains('polish')
fatigue
Out[2]:
Surface Condition specimen Frequency (Hz) Strain Amplitude (mm/mm) Fatigue Life (cycles) Reversal to failure (2Nf) ispolished
0 As-built V02 5.0 0.0040 45859 91718 False
1 As-built V10 5.0 0.0040 49177 98354 False
2 As-built V04 7.5 0.0030 91222 182444 False
3 As-built V16 7.5 0.0030 110013 220026 False
4 As-built V08 7.5 0.0030 136464 272928 False
5 As-built V12 8.0 0.0025 192404 384808 False
6 As-built V18 8.0 0.0025 259128 518256 False
7 As-built V14 8.0 0.0025 320856 641712 False
8 As-built V06 10.0 0.0020 5000000 10000000 False
9 Half-polished V07 5.0 0.0040 50916 101832 True
10 Half-polished V09 5.0 0.0040 60992 121984 True
11 Half-polished V15 7.5 0.0030 132668 265336 True
12 Half-polished V05 7.5 0.0030 153540 307080 True
13 Half-polished V03 7.5 0.0030 163123 326246 True
14 Half-polished V13 8.0 0.0025 287061 574122 True
15 Half-polished V11 8.0 0.0025 291206 582412 True
16 Half-polished V17 8.0 0.0025 395801 791602 True
17 Half-polished V01 10.0 0.0020 5000000 10000000 True
In [3]:
from additive.utility import dfe, get_file_info
files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/*.info")
# files = get_file_info(pd.Series(files)).merge(fatigue, on='specimen')['files']
# files = files[~files.str.contains('V13')]
In [4]:
#polished_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_*.info")
#asbuilt_files = [x.replace("Polished_", '') for x in polished_files]
In [5]:
# all_files = [ x for x in polished_files+asbuilt_files if os.path.exists(x)]
In [6]:
def min_max_scale(x, a=0, b=1):
    """Linearly rescale x so its minimum maps to a and its maximum to b."""
    mn, mx = x.min(), x.max()
    rng = b - a
    out = (x - mn)/(mx - mn)
    return out * rng + a
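A quick worked check of the scaler:
In [ ]:
min_max_scale(np.array([0., 5., 10.]), 0, 255)  # -> array([  0. , 127.5, 255. ])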
In [7]:
from scipy.stats import mode
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x, funs=feat_funs):
    # Fan each feature function out as a dask delayed task and compute together.
    # funs defaults to the full registry so the callers below can omit it.
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
In [8]:
from additive.preprocessing import correct_aligment
def read_file_equalize_and_get_stats(file):
    data = joblib.load(file)
    image = np.array(data['value'].x)[500:-500, 500:-500]
    # image = load_and_process_image(file, lambda x: x['value'].x, (500, 500))
    # image = correct_aligment(image, l=300)
    scaled_image = min_max_scale(image, 0, 255).astype('uint8')
    equalized_image = cv2.equalizeHist(scaled_image)
    return image_stats(equalized_image)

def read_file_and_get_stats(file):
    data = joblib.load(file)
    image = np.array(data['value'].x)[500:-500, 500:-500]
    # image = load_and_process_image(file, lambda x: x['value'].x, (500, 500))
    # image = correct_aligment(image, l=300)
    return image_stats(image)
In [ ]:
with Pool(8) as p:
    # res = p.map(read_file_equalize_and_get_stats, files)
    res = p.map(read_file_and_get_stats, files)
In [10]:
out = pd.DataFrame(res).assign(file=[dfe(x)[1] for x in files])
equalized_stats = get_file_info(out, 'file').drop('file', axis=1)
In [17]:
# equalized_stats.to_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v01.csv")
files_to_drop = {'/home/ben_rasoolov/additive_project/data/experiment_03/V02_T1_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V02_T1_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V02_T2_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V02_T2_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V03_T1_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V03_T1_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V03_T2_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V03_T2_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V04_T1_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V04_T1_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V04_T2_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V04_T2_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V05_T1_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V05_T1_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V05_T2_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V05_T2_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V06_T1_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V06_T1_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V06_T2_L_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V06_T2_R_3d.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V19_T1_Left(Bottom)_500X_3D_2.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V19_T1_Right(Top)_500X_3D_2.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V19_T2_Left(Bottom)_500X_3D_2.info',
 '/home/ben_rasoolov/additive_project/data/experiment_03/V19_T2_Right(Top)_500X_3D_2.info'}
t = equalized_stats.assign(file=files)
cond = ~t.file.isin(files_to_drop)
t[cond].to_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_no_transform_v01.csv")
In [12]:
cond = equalized_stats['ispolished']
polished_vs_unpolished = equalized_stats[cond]\
    .merge(
        equalized_stats[~cond], 
        on=['specimen', 'T', 'RL'],
        suffixes = ['_polished', '_unpolished']
    ).sort_values(['specimen', 'T', 'RL'])
In [124]:
chosen = polished_vs_unpolished[['specimen', 'T', 'RL', 'rp_2d_polished', 'rp_2d_unpolished', 'rv_2d_polished', 'rv_2d_unpolished']]
tmp = chosen# [chosen['specimen'] != 'V13']
In [136]:
res = equalized_stats[equalized_stats['specimen'] != 'V13'].groupby(['ispolished', 'specimen']).median()\
    .reset_index().merge(fatigue, on='specimen')
res['Reversal to failure (2Nf)'] = np.log(res['Reversal to failure (2Nf)'])
res = res.sort_values(['ispolished_x'], ascending=False).drop_duplicates('specimen')
In [137]:
# res = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/hist_equalized_global_stats_v02.csv")
# Undo the log transform applied above so the values are in raw cycles again.
res['Reversal to failure (2Nf)'] = np.exp(res['Reversal to failure (2Nf)'])
In [138]:
res.sort_values(['Reversal to failure (2Nf)'])
Out[138]:
ispolished_x specimen ra_1d rq_1d rsk_1d rku_1d rp_1d rv_1d ra_2d rq_2d ... mode_1d mode_2d median mean Surface Condition Frequency (Hz) Strain Amplitude (mm/mm) Fatigue Life (cycles) Reversal to failure (2Nf) ispolished_y
2 False V02 62.284003 72.099675 -0.024943 1.874644 123.896189 127.507305 63.714914 73.590112 ... 2.944868 -5.5 129.0 128.792558 As-built 5.0 0.0040 45859 91718.0 False
14 False V10 61.405975 71.170445 -0.026037 1.899773 123.290664 126.871472 63.697031 73.555911 ... 7.884428 -18.5 128.0 128.412229 As-built 5.0 0.0040 49177 98354.0 False
10 True V07 58.370217 67.089260 -0.133901 1.888236 104.590633 124.607512 63.804496 73.666763 ... 47.534579 -8.0 128.5 128.358510 Half-polished 5.0 0.0040 50916 101832.0 True
13 True V09 48.948604 57.519433 -0.461537 2.630725 88.008317 124.608756 64.273125 74.052439 ... 35.526880 47.5 129.0 128.728096 Half-polished 5.0 0.0040 60992 121984.0 True
5 False V04 60.376728 70.305721 -0.021962 1.980787 124.030143 127.369881 63.757717 73.631955 ... 7.590736 2.5 130.0 128.719705 As-built 7.5 0.0030 91222 182444.0 False
21 False V16 62.764816 72.585250 -0.013355 1.845383 124.580007 127.184034 63.686873 73.547529 ... -3.556843 -21.0 128.5 128.581611 As-built 7.5 0.0030 110013 220026.0 False
20 True V15 43.947882 51.731925 -0.482821 2.582426 78.847079 115.232571 63.923035 73.796936 ... 44.501221 93.5 128.5 128.382323 Half-polished 7.5 0.0030 132668 265336.0 True
11 False V08 61.575268 71.413332 -0.016413 1.899997 124.423290 126.579382 63.717955 73.567644 ... -0.854986 6.0 129.5 128.553609 As-built 7.5 0.0030 136464 272928.0 False
7 True V05 48.782117 57.213929 -0.296048 2.253622 88.180401 124.328500 64.064994 73.889726 ... 33.933969 46.5 128.0 128.456295 Half-polished 7.5 0.0030 153540 307080.0 True
4 True V03 46.138077 54.372172 -0.660386 2.647313 72.296833 125.542178 64.096129 73.922122 ... 44.721489 74.5 128.5 128.507025 Half-polished 7.5 0.0030 163123 326246.0 True
17 False V12 61.534946 71.191909 -0.018903 1.887337 123.233029 126.545415 63.794720 73.656413 ... 3.036812 -9.5 129.0 128.754246 As-built 8.0 0.0025 192404 384808.0 False
24 False V18 60.608636 70.342762 0.014297 2.087752 123.534422 126.721850 63.741046 73.596413 ... 15.812460 -2.5 129.0 128.447424 As-built 8.0 0.0025 259128 518256.0 False
16 True V11 41.736293 49.028744 -0.745729 3.176900 69.576776 120.134997 64.181059 74.062860 ... 36.787074 53.0 128.0 128.685210 Half-polished 8.0 0.0025 291206 582412.0 True
18 False V14 59.687899 69.471718 -0.013167 1.984921 123.746290 125.937712 63.641839 73.511164 ... -7.030726 -6.5 129.5 128.732125 As-built 8.0 0.0025 320856 641712.0 False
23 True V17 58.583211 67.272757 -0.288679 1.917459 98.488066 126.842198 63.772418 73.672438 ... 63.137674 -14.0 128.0 128.286057 Half-polished 8.0 0.0025 395801 791602.0 True
8 False V06 48.431843 57.868091 0.013777 2.626135 115.140520 120.377513 63.770432 73.632697 ... 2.101947 -4.0 129.5 128.793138 As-built 10.0 0.0020 5000000 10000000.0 False
1 True V01 56.355884 64.314820 -0.408120 1.959156 90.637832 125.929290 63.885913 73.778843 ... 66.526132 94.5 129.0 128.497792 Half-polished 10.0 0.0020 5000000 10000000.0 True

17 rows × 24 columns

In [141]:
for strain in sorted(res['Strain Amplitude (mm/mm)'].unique()):
    cond = res['Strain Amplitude (mm/mm)'] == strain
    sns.scatterplot(data=res[cond], x='mode_1d', y='Reversal to failure (2Nf)', hue='ispolished_x')
    plt.title(strain)
    plt.show()
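Since the fatigue lives span roughly two orders of magnitude, a log-scaled y-axis may make these per-strain scatter plots easier to read; a sketch of the same loop with that change and a labelled title:

for strain in sorted(res['Strain Amplitude (mm/mm)'].unique()):
    cond = res['Strain Amplitude (mm/mm)'] == strain
    ax = sns.scatterplot(data=res[cond], x='mode_1d',
                         y='Reversal to failure (2Nf)', hue='ispolished_x')
    ax.set_yscale('log')  # lives range from ~9e4 to 1e7 reversals
    plt.title(f'Strain amplitude: {strain} mm/mm')
    plt.show()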
In [42]:
import re
from additive.utility import pick_cols
pick_cols(res, '(ispolished)|(Frequency)|(Fatigue)', reverse=True)\
    .sort_values('Reversal to failure (2Nf)')
    # .to_csv("/home/ben_rasoolov/additive_project/data/paper/data/hist_equalized_global_stats_v02.csv")
Out[42]:
specimen ra_1d rq_1d rsk_1d rku_1d rp_1d rv_1d ra_2d rq_2d rp_2d rv_2d rsk_2d rku_2d mode_1d mode_2d median mean Surface Condition Strain Amplitude (mm/mm) Reversal to failure (2Nf)
2 V02 62.119727 71.940323 -0.025170 1.884803 124.003473 127.397553 63.716802 73.589046 126.199550 128.800450 -0.017911 1.801539 2.144227 -7.625 129.125 128.800450 As-built 0.0040 91718.0
14 V10 61.351317 71.167198 -0.027082 1.905115 123.286870 126.927140 63.697043 73.555404 126.557163 128.442837 -0.011004 1.800566 9.974985 -16.500 128.500 128.442837 As-built 0.0040 98354.0
10 V07 57.991264 66.608832 -0.134614 1.882088 104.400884 124.127296 63.804029 73.672172 126.654215 128.345785 -0.008766 1.801073 53.848065 14.250 128.500 128.345785 Half-polished 0.0040 101832.0
13 V09 48.500368 57.091276 -0.441006 2.692799 87.123127 124.403833 64.281298 74.060677 126.209240 128.790760 -0.013108 1.786769 35.728565 45.500 129.250 128.790760 Half-polished 0.0040 121984.0
5 V04 59.448304 69.380732 -0.022977 2.037573 123.331469 126.751334 63.758218 73.622208 126.193281 128.806719 -0.018553 1.799998 7.304917 2.375 129.625 128.806719 As-built 0.0030 182444.0
21 V16 62.755319 72.569349 -0.014693 1.845472 124.564646 127.226072 63.678521 73.533297 126.417498 128.582502 -0.011676 1.801352 -6.487822 -23.750 128.750 128.582502 As-built 0.0030 220026.0
20 V15 44.155439 51.810092 -0.476343 2.549454 79.313925 115.426169 63.914563 73.809302 126.593722 128.406278 -0.007087 1.799071 46.120527 91.750 128.500 128.406278 Half-polished 0.0030 265336.0
11 V08 61.616568 71.472211 -0.017599 1.900562 124.414485 126.606179 63.724200 73.576144 126.440536 128.559464 -0.014241 1.800352 -1.375329 6.000 129.250 128.559464 As-built 0.0030 272928.0
7 V05 49.232581 58.183169 -0.300888 2.266286 91.064030 124.564406 64.066617 73.903371 126.545167 128.454833 -0.008241 1.793100 36.478512 53.250 128.250 128.454833 Half-polished 0.0030 307080.0
4 V03 46.995392 55.244381 -0.696228 2.653482 73.559058 125.584553 64.088518 73.922949 126.494465 128.505535 -0.007778 1.792050 45.395849 71.750 128.500 128.505535 Half-polished 0.0030 326246.0
17 V12 61.519966 71.268187 -0.019844 1.893568 123.488058 126.169313 63.786792 73.646185 126.221360 128.778640 -0.017628 1.799703 3.843312 -4.000 129.250 128.778640 As-built 0.0025 384808.0
24 V18 60.604642 70.303568 0.019395 2.114391 123.226621 126.576398 63.739904 73.596630 126.555493 128.444507 -0.011423 1.799742 15.914726 -1.250 128.750 128.444507 As-built 0.0025 518256.0
16 V11 41.080385 48.747603 -0.669675 3.138285 67.816128 119.382994 64.211779 74.053869 126.328097 128.671903 -0.010481 1.789900 32.553409 51.250 128.500 128.671903 Half-polished 0.0025 582412.0
18 V14 59.657695 69.399252 -0.005193 1.992617 123.392246 125.938817 63.661046 73.514391 126.245654 128.754346 -0.015208 1.801502 -6.498436 -8.000 129.500 128.754346 As-built 0.0025 641712.0
23 V17 58.172486 66.927868 -0.295113 1.922344 98.818941 126.869701 63.772401 73.668294 126.695439 128.304561 -0.007517 1.802545 62.581427 -18.250 128.000 128.304561 Half-polished 0.0025 791602.0
8 V06 48.545229 57.734485 0.028323 2.638455 114.076626 117.144519 63.775645 73.639274 126.189780 128.810220 -0.017669 1.799477 2.944106 -1.000 129.750 128.810220 As-built 0.0020 10000000.0
1 V01 56.806961 64.787442 -0.405448 1.958130 91.148023 125.852803 63.889889 73.781657 126.507071 128.492929 -0.006824 1.800958 67.679014 97.500 129.000 128.492929 Half-polished 0.0020 10000000.0
In [44]:
#cond = res['specimen'] > 'V06'
In [101]:
#x = res[['ra_1d']]
X = pick_cols(res, '(.*_.d)|(Strain)').values
y = np.log(res['Reversal to failure (2Nf)'].values)
In [103]:
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.model_selection import train_test_split
In [104]:
Xtrain, Xtest, ytrain, ytest = train_test_split(X, y, test_size=.2)
In [105]:
scaler = StandardScaler()
Xtrain = scaler.fit_transform(Xtrain)  # note: Xtest is never transformed before the predictions below
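Only the training features are transformed here; the cells below pass Xtest and X to the random forest untransformed. A minimal sketch of the consistent pattern (fit the scaler on the training split, transform both), with a fixed random_state so the split is reproducible:

Xtrain, Xtest, ytrain, ytest = train_test_split(X, y, test_size=.2, random_state=0)
scaler = StandardScaler().fit(Xtrain)  # learn mean/std from the training split only
Xtrain, Xtest = scaler.transform(Xtrain), scaler.transform(Xtest)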
In [84]:
from sklearn.ensemble import RandomForestRegressor
In [87]:
model = RandomForestRegressor(n_estimators=1000, n_jobs=10)
model.fit(Xtrain, ytrain)
Out[87]:
RandomForestRegressor(bootstrap=True, ccp_alpha=0.0, criterion='mse',
                      max_depth=None, max_features='auto', max_leaf_nodes=None,
                      max_samples=None, min_impurity_decrease=0.0,
                      min_impurity_split=None, min_samples_leaf=1,
                      min_samples_split=2, min_weight_fraction_leaf=0.0,
                      n_estimators=1000, n_jobs=10, oob_score=False,
                      random_state=None, verbose=0, warm_start=False)
In [88]:
# np.sqrt(((model.predict(Xtrain) - ytrain)**2).sum())
np.sqrt(((model.predict(Xtest) - ytest)**2).sum())  # root of the *summed* squared error; use .mean() inside for RMSE
Out[88]:
3.48689652420815
In [89]:
plt.scatter(model.predict(X), y)
Out[89]:
<matplotlib.collections.PathCollection at 0x7fd479ee2eb8>
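With only 17 specimens, a single 80/20 split leaves a 4-point test set, so the error estimate above is very noisy. Leave-one-out cross-validation is one way to use every sample; a sketch (unscaled X is fine here, since trees are insensitive to feature scaling):

from sklearn.model_selection import cross_val_score, LeaveOneOut
scores = cross_val_score(RandomForestRegressor(n_estimators=1000, n_jobs=10),
                         X, y, cv=LeaveOneOut(),
                         scoring='neg_mean_squared_error')
print(np.sqrt(-scores.mean()))  # leave-one-out RMSE on log(2Nf)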
In [106]:
from tensorflow import keras
In [118]:
model = keras.Sequential([
    keras.Input(shape=(Xtrain.shape[1],)),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(100, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dense(1),
])
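Before compiling, model.summary() is a quick sanity check on capacity; here the dense layers hold on the order of ten thousand weights against only 13 fit samples, so heavy overfitting is to be expected:

model.summary()  # layer shapes and parameter counts; the weights vastly outnumber the samples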
In [119]:
model.compile(optimizer='adam', loss='mse')
In [120]:
model.fit(Xtrain, ytrain, epochs=10000, validation_split=.2, 
          callbacks=[keras.callbacks.EarlyStopping(patience=100, restore_best_weights=True)])
Train on 10 samples, validate on 3 samples
Epoch 1/10000
10/10 [==============================] - 1s 93ms/sample - loss: 162.8927 - val_loss: 200.0758
Epoch 2/10000
10/10 [==============================] - 0s 3ms/sample - loss: 161.9148 - val_loss: 196.0650
Epoch 3/10000
10/10 [==============================] - 0s 2ms/sample - loss: 160.7957 - val_loss: 192.0151
...
Epoch 347/10000
10/10 [==============================] - 0s 4ms/sample - loss: 1.3622 - val_loss: 0.7307
...
Epoch 446/10000
10/10 [==============================] - 0s 2ms/sample - loss: 1.7055 - val_loss: 1.3485
Epoch 447/10000
10/10 [==============================] - 0s 4ms/sample - loss: 2.0014 - val_loss: 1.4057
Out[120]:
<tensorflow.python.keras.callbacks.History at 0x7fd41c236e10>
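Early stopping halted training at epoch 447, 100 epochs after the best validation loss (0.7307 at epoch 347), and restore_best_weights rolled the model back to that point. The returned History object (also kept on model.history) records the per-epoch losses, so the curves are easier to read as a plot than as a log; a minimal sketch:

hist = model.history  # History from the last fit
plt.plot(hist.history['loss'], label='train')
plt.plot(hist.history['val_loss'], label='validation')
plt.yscale('log')  # losses fall over two orders of magnitude
plt.xlabel('epoch'); plt.ylabel('MSE'); plt.legend()
plt.show()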
In [121]:
model.evaluate(scaler.transform(Xtest), ytest)
4/4 [==============================] - 0s 3ms/sample - loss: 4.7599
Out[121]:
4.7599077224731445
In [122]:
plt.scatter(y, model.predict(scaler.transform(X)))
Out[122]:
<matplotlib.collections.PathCollection at 0x7fd41073d550>

Polished vs. unpolished surface visualization

In [1]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
In [2]:
polished_data = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V03_T1_Left(Bottom)_500X_3D.info")
In [3]:
asbuilt_data = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/V03_T1_Left(Bottom)_500X_3D.info")
In [4]:
polished_im = np.array(polished_data['value'].x)
asbuilt_im = np.array(asbuilt_data['value'].x)
In [5]:
polished_im.shape, asbuilt_im.shape
Out[5]:
((6844, 7711), (6938, 7692))
In [6]:
d = 100  # hand-tuned column offset aligning the as-built crop to the polished one
t = 50   # hand-tuned row offset
a_pol = polished_im[2000:-2000, 2000:-2000]
a_pol -= a_pol.mean()  # centre the heights around zero
w, h = a_pol.shape     # numpy shape is (rows, cols), so w is the row count here
a_asb = asbuilt_im[2000+t:2000+t+w, 2000+d:2000+d+h]
a_asb -= a_asb.mean()
In [67]:
%matplotlib inline
fig, axes = plt.subplots(1, 2, figsize=(16, 5))
axes = axes.reshape(-1)
c = axes[1].imshow(a_pol, cmap='jet')
axes[1].set_title('Polished Image')
cax = fig.add_axes([0.89, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
c = axes[0].imshow(a_asb, cmap='jet')
axes[0].set_title('As-built Image')
cax = fig.add_axes([0.47, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/asbuilt_vs_polished_2d.png", dpi=300)
In [8]:
def min_max_scale(x, a=0, b=1):
    """Linearly rescale x so that x.min() maps to a and x.max() maps to b."""
    mn, mx = x.min(), x.max()
    out = (x - mn) / (mx - mn)   # normalize to [0, 1]
    return out * (b - a) + a     # stretch/shift to [a, b]
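A quick sanity check of min_max_scale on made-up values (the array below is purely illustrative):

min_max_scale(np.array([2.0, 4.0, 6.0]), 0, 255)
# -> array([  0. , 127.5, 255. ])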
In [68]:
a_pol_equalized = cv2.equalizeHist(min_max_scale(a_pol, 0, 255).astype('uint8'))
# a_pol_equalized = min_max_scale(a_pol_equalized, 0, a_pol.max()-a_pol.min())
a_asb_equalized = cv2.equalizeHist(min_max_scale(a_asb, 0, 255).astype('uint8'))
# a_asb_equalized = min_max_scale(a_asb_equalized, 0, a_asb.max()-a_asb.min())
%matplotlib inline
fig, axes = plt.subplots(1, 2, figsize=(16, 5))
axes = axes.reshape(-1)
c = axes[1].imshow(a_pol_equalized)
axes[1].set_title('Polished Image')
cax = fig.add_axes([0.89, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
c = axes[0].imshow(a_asb_equalized)
axes[0].set_title('As-built Image')
cax = fig.add_axes([0.47, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
#plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/asbuilt_vs_polished_2d.png", dpi=300)
Out[68]:
<matplotlib.colorbar.Colorbar at 0x7f02ba903c50>
In [70]:
from scipy.stats import mode
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x):
    # wrap every 2-D feature function with dask.delayed and evaluate them all in one compute() call
    funs = {k: v for k, v in feat_funs.items() if '_2d' in k}
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
In [71]:
image_stats(a_pol), image_stats(a_asb)
Out[71]:
({'ra_2d': 7.9348383,
  'rq_2d': 9.909723,
  'rp_2d': 15.458671,
  'rv_2d': 43.33512,
  'rsk_2d': -1.160044018531971,
  'rku_2d': 3.6342245470620482,
  'mode_2d': 7.0},
 {'ra_2d': 18.053083,
  'rq_2d': 22.491354,
  'rp_2d': 72.2355,
  'rv_2d': 76.20617,
  'rsk_2d': -0.12702182479337154,
  'rku_2d': 2.798468140554855,
  'mode_2d': 3.0})
In [72]:
image_stats(a_pol_equalized), image_stats(a_asb_equalized)
Out[72]:
({'ra_2d': 64.3602491959488,
  'rq_2d': 74.13747509255109,
  'rp_2d': 126.21494484978516,
  'rv_2d': 128.78505515021484,
  'rsk_2d': -0.01622937017480912,
  'rku_2d': 1.7819690858167678,
  'mode_2d': 58.0},
 {'ra_2d': 63.75106623742168,
  'rq_2d': 73.62104996442443,
  'rp_2d': 126.52297830868127,
  'rv_2d': 128.47702169131873,
  'rsk_2d': -0.01323404393625666,
  'rku_2d': 1.8001987190128763,
  'mode_2d': 9.0})
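Comparing Out[71] with Out[72]: histogram equalization pulls the polished and as-built amplitude statistics almost on top of each other, and only the mode still separates the two surfaces. A quick way to tabulate the remaining per-feature gaps (a sketch reusing the arrays from the cells above):

pol, asb = image_stats(a_pol_equalized), image_stats(a_asb_equalized)
{k: float(abs(pol[k] - asb[k])) for k in pol}   # gap between polished and as-built after equalization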
In [66]:
plt.hist(a_pol.reshape(-1), alpha=.2)
plt.plot([a_pol.mean(), a_pol.mean()], [0, 4e6])   # vertical line at the polished mean
plt.hist(a_asb.reshape(-1), alpha=.2)
plt.plot([a_asb.mean(), a_asb.mean()], [0, 4e6])   # vertical line at the as-built mean
Out[66]:
[<matplotlib.lines.Line2D at 0x7f02a7b42e48>]
In [69]:
plt.hist(a_pol_equalized.reshape(-1), alpha=.2)
plt.hist(a_asb_equalized.reshape(-1), alpha=.2)
Out[69]:
(array([1014412., 1039323., 1081541., 1031140., 1056441., 1004794.,
        1122009., 1006041., 1124585., 1073798.]),
 array([  0. ,  25.5,  51. ,  76.5, 102. , 127.5, 153. , 178.5, 204. ,
        229.5, 255. ]),
 <a list of 10 Patch objects>)

Polished vs. unpolished surface visualization (with 3D surface plot)

In [60]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [62]:
polished_data = joblib.load("../data/experiment_03/Polished_V03_T1_Left(Bottom)_500X_3D.info")
In [63]:
asbuilt_data = joblib.load("../data/experiment_03/V03_T1_Left(Bottom)_500X_3D.info")
In [72]:
polished_im = np.array(polished_data['value'].x)
asbuilt_im = np.array(asbuilt_data)
In [73]:
polished_im.shape, asbuilt_im.shape
Out[73]:
((6844, 7711), (6939, 7692))
In [74]:
d = 100
t = 50
a_pol = polished_im[2000:-2000, 2000:-2000]
a_pol -= a_pol.mean()
w, h = a_pol.shape
a_asb  = asbuilt_im[2000+t:2000+t+w, 2000+d:2000+d+h]
a_asb -= a_asb.mean()
In [76]:
%matplotlib inline
fig, axes = plt.subplots(1, 2, figsize=(16, 5))
axes = axes.reshape(-1)
c = axes[1].imshow(a_pol, cmap='jet')
axes[1].set_title('Polished Image')
cax = fig.add_axes([0.89, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
c = axes[0].imshow(a_asb, cmap='jet')
axes[0].set_title('As-built Image')
cax = fig.add_axes([0.47, 0.1, 0.02, 0.78])
fig.colorbar(c, cax=cax)
plt.savefig("../data/paper/figures/asbuilt_vs_polished_2d.png", dpi=300)
In [77]:
from additive.experimental import plot_3d_surface
In [81]:
%matplotlib notebook
fig, axes = plt.subplots(figsize=(8, 8))
ax = fig.gca(projection='3d')
plot_3d_surface(polished_im[2000:-2000, 2000:-2000], step=30, ax=ax, cmap='jet')
plot_3d_surface(asbuilt_im[2000+t:-2000+t, 2000+d:-2000+d]+200, step=30, ax=ax, cmap='jet')
/home/bzr0014/miniconda3/envs/additive/lib/python3.6/site-packages/ipykernel_launcher.py:3: UserWarning: Requested projection is different from current axis projection, creating new axis with requested projection.
  This is separate from the ipykernel package so we can avoid doing imports until
Out[81]:
<mpl_toolkits.mplot3d.art3d.Poly3DCollection at 0x7f8262bcd438>
In [78]:
plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/asbuilt_vs_polished_3d.png", dpi=300)

Image comparison before and after tilt

In [26]:
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [30]:
data = joblib.load("../data/experiment_03/V01_T2_Left(Bottom)_500X_3D.info")
image = np.array(data['value'].x)
In [31]:
from scipy.ndimage import rotate
In [32]:
angle = 1.5
rotated = rotate(image, -angle)
In [33]:
double_rotated = rotate(rotated, angle)
In [34]:
fig, axes = plt.subplots(1, 2, figsize=(16, 10))

axes[0].imshow(rotated[150:-180, 500:-500], cmap='jet')
axes[1].imshow(double_rotated[150:-180, 500:-500], cmap='jet')

axes[0].set_title("Before alignment")
axes[1].set_title("After alignment")
plt.savefig("../data/paper/figures/before_after_alignment.png", dpi=300)
In [155]:
%reset -f
%load_ext autoreload
%autoreload 2
from imports import *
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [164]:
from additive.preprocessing import load_and_process_image
data = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info")
image = load_and_process_image(np.array(data['value'].x), crop_size=[1000, 300])
image.shape
[ 4.24200258e-03  5.02962525e-04 -6.69237197e-07  6.97323158e-08]
Out[164]:
(4708, 7164)
In [165]:
from scipy.stats import mode
modes = mode(image, axis=1)
print(mode(image.reshape(-1)))
print(modes.mode.mean())
ModeResult(mode=array([63.40149692]), count=array([1]))
77.52556115523079
In [168]:
image.mean()
Out[168]:
111.23551867914627
In [166]:
from additive.preprocessing import load_and_process_image
data2 = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info")
image2 = load_and_process_image(np.array(data2['value'].x), crop_size=[1000, 300])
image2.shape
[ 4.73096181e-03  3.18209739e-03 -3.68233728e-07 -1.43032627e-07]
Out[166]:
(5270, 7133)
In [167]:
modes2 = mode(image2, axis=1)
print(mode(image2.reshape(-1)))
print(modes2.mode.mean())
ModeResult(mode=array([63.40149692]), count=array([1]))
77.52556115523079
In [170]:
image2.mean()
Out[170]:
134.20637609212093

Finding alternative measures to replace rv

In [264]:
%load_ext autoreload
%autoreload 2
%reset -f
import numpy as np
import joblib
import glob
from imports import *
from additive.feature_functions import feature_functions_functions as feature_funs
from additive.features import Features
from additive.preprocessing import load_and_process_image
from functools import reduce
from operator import or_
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [265]:
def isin(values, text, ignore_case=False):
    """Return True if any of `values` occurs as a substring of `text`."""
    if ignore_case:
        return reduce(or_, [v.lower() in text.lower() for v in values])
    return reduce(or_, [v in text for v in values])
In [266]:
################### ################### ################### ###################
## version 1: all files
files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/*info")
chosen_files = [f for f in files if isin(['v18', 'v12', 'v14', 'v13', 'v11', 'v17'], f, ignore_case=True)]
print(len(chosen_files))
## version 2: v19 repeated measurements
################### ################### ################### ###################
#info_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03//*_2.info")
#chosen_files = info_files + [x.replace("_2", "") for x in info_files]
#len(chosen_files)
36
In [267]:
def get_features(img, features=None):
    if features is None:
        return {feature: fun(img) for feature, fun in feature_funs.items()}
    return {feature: fun(img) for feature, fun in feature_funs.items() if feature in features}
In [268]:
from multiprocessing import Pool
import dask
def get_features_from_files(chosen_files, n_procs=8):
    # build a lazy dask bag: load and detilt each image, then extract the mode features
    images = bag.from_sequence(chosen_files)
    preprocessed_images = images.map(load_and_process_image,
                                 transform_fun=lambda data: np.array(data['value'].x), crop_size=(1000, 1000))
    features = preprocessed_images.map(get_features, features=['mode_1d', 'mode_2d'])
    # evaluate the graph on a process pool (one worker per file, capped at n_procs)
    with Pool(min(len(chosen_files), n_procs)) as p:
        with dask.config.set(pool=p):
            out = features.compute()
    return out
In [269]:
out = get_features_from_files(chosen_files)
(36 rows of per-file tilt-correction coefficients printed by load_and_process_image omitted)
In [270]:
import pandas as pd
from additive.utility import get_file_info
df = pd.DataFrame(dict(zip(chosen_files, out))).T
df.index = df.index.map(lambda x: x.split("/")[-1].split(".")[0]).rename("file")
df = df.reset_index()
df = get_file_info(df['file']).join(df)
df
Out[270]:
ispolished specimen T RL file mode_1d mode_2d
0 False V17 T2 R V17_T2_Right(Top)_500X_3D 2.614834 1.0
1 False V13 T1 L V13_T1_Left(Bottom)_500X_3D -0.824901 -3.0
2 True V11 T1 L Polished_V11_T1_Left(Bottom)_500X_3D 3.644151 3.0
3 False V11 T2 R V11_T2_Right(Top)_500X_3D 0.680842 4.0
4 False V12 T1 R V12_T1_Right(Top)_500X_3D 1.686387 1.0
5 False V17 T2 L V17_T2_Left(Bottom)_500X_3D 1.876017 1.0
6 False V14 T2 L V14_T2_Left(Bottom)_500X_3D -6.945461 -8.0
7 False V13 T2 R V13_T2_Right(Top)_500X_3D 9.703917 10.0
8 False V18 T2 L V18_T2_Left(Bottom)_500X_3D 2.239669 -5.0
9 True V11 T1 R Polished_V11_T1_Right(Top)_500X_3D 5.197160 5.0
10 False V13 T1 R V13_T1_Right(Top)_500X_3D -0.450307 2.0
11 False V14 T1 L V14_T1_Left(Bottom)_500X_3D -1.882596 -3.0
12 False V18 T2 R V18_T2_Right(Top)_500X_3D 3.942809 -11.0
13 False V11 T2 L V11_T2_Left(Bottom)_500X_3D 1.373035 5.0
14 True V17 T1 L Polished_V17_T1_Left(Bottom)_500X_3D 7.673534 16.0
15 True V11 T2 R Polished_V11_T2_Right(Top)_500X_3D 7.300348 7.0
16 True V11 T2 L Polished_V11_T2_Left(Bottom)_500X_3D 7.400737 8.0
17 False V17 T1 R V17_T1_Right(Top)_500X_3D 1.115155 0.0
18 True V13 T1 R Polished_V13_T1_Right(Top)_500X_3D 13.472081 14.0
19 False V11 T1 L V11_T1_Left(Bottom)_500X_3D -0.445094 1.0
20 False V11 T1 R V11_T1_Right(Top)_500X_3D -3.276441 2.0
21 False V12 T2 R V12_T2_Right(Top)_500X_3D 0.882377 2.0
22 False V14 T1 R V14_T1_Right(Top)_500X_3D -1.255964 -2.0
23 False V12 T1 L V12_T1_Left(Bottom)_500X_3D 1.010206 4.0
24 True V17 T2 L Polished_V17_T2_Left(Bottom)_500X_3D 15.587376 17.0
25 False V17 T1 L V17_T1_Left(Bottom)_500X_3D 2.741746 1.0
26 False V18 T1 R V18_T1_Right(Top)_500X_3D 1.988014 -4.0
27 False V12 T2 L V12_T2_Left(Bottom)_500X_3D 2.044061 7.0
28 True V17 T1 R Polished_V17_T1_Right(Top)_500X_3D 13.476657 17.0
29 True V13 T2 L Polished_V13_T2_Left(Bottom)_500X_3D 14.208404 15.0
30 True V13 T1 L Polished_V13_T1_Left(Bottom)_500X_3D 11.527866 12.0
31 False V13 T2 L V13_T2_Left(Bottom)_500X_3D 14.208404 15.0
32 True V13 T2 R Polished_V13_T2_Right(Top)_500X_3D 9.703917 10.0
33 True V17 T2 R Polished_V17_T2_Right(Top)_500X_3D 18.612027 21.0
34 False V14 T2 R V14_T2_Right(Top)_500X_3D -5.391444 -4.0
35 False V18 T1 L V18_T1_Left(Bottom)_500X_3D 1.451263 0.0
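One pattern stands out in the table: mode_1d and mode_2d sit markedly higher for the polished surfaces. A quick check of that split (a sketch using df from above):

df.groupby('ispolished')[['mode_1d', 'mode_2d']].mean()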
In [275]:
# orig = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v04.csv")
# df4 = orig.drop(['mode_1d', 'mode_2d'], axis=1).merge(df, on=['ispolished', 'specimen', 'T', 'RL'])
# df4.to_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v04.csv", index=False)
In [557]:
orig[orig.ispolished & orig['specimen'].isin({'V11', 'V13', 'V17'})].groupby(['specimen']).max()
Out[557]:
ispolished T RL file_x ra_1d rq_1d rsk_1d rku_1d rp_1d rv_1d ra_2d rq_2d rp_2d rv_2d rsk_2d rku_2d file_y mode_1d mode_2d
specimen
V11 True T2 R Polished_V11_T2_Right(Top)_500X_3D 7.958603 9.857359 -1.368657 7.491663 10.520284 36.089186 8.072428 10.150749 39.829623 67.529944 -1.225809 6.331805 Polished_V11_T2_Right(Top)_500X_3D 7.400737 8.0
V13 True T2 R Polished_V13_T2_Right(Top)_500X_3D 13.283742 15.626065 -0.751848 4.620403 17.205495 46.826339 13.412949 15.919650 21.471980 72.756115 -0.710786 4.410962 Polished_V13_T2_Right(Top)_500X_3D 14.208404 15.0
V17 True T2 R Polished_V17_T2_Right(Top)_500X_3D 13.642760 16.134591 -0.212123 2.727019 22.267575 45.397845 13.802137 16.466252 77.231297 62.059206 -0.202740 2.757794 Polished_V17_T2_Right(Top)_500X_3D 18.612027 21.0
In [558]:
orig = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_tilted_rotated_cropped_v04.csv")
cols_to_drop = [x for x in orig.columns if x.endswith('_2d')] + ['file_x', 'file_y']
tmp = orig.drop(cols_to_drop, axis=1)
In [560]:
res = tmp[tmp['ispolished']].merge(tmp[~tmp['ispolished']], on=['specimen', 'T', 'RL'])\
    .drop(['ispolished_x', 'ispolished_y'], axis=1)
res
Out[560]:
specimen T RL ra_1d_x rq_1d_x rsk_1d_x rku_1d_x rp_1d_x rv_1d_x mode_1d_x ra_1d_y rq_1d_y rsk_1d_y rku_1d_y rp_1d_y rv_1d_y mode_1d_y
0 V11 T1 L 4.527431 6.208072 -2.083207 7.491663 5.929927 28.451027 3.644151 19.792682 24.456594 0.231876 2.865466 72.894686 56.415843 -0.445094
1 V11 T1 R 6.192697 7.930921 -1.656373 5.331377 7.676874 31.480531 5.197160 20.674016 25.587523 0.351087 2.876343 75.648138 55.552691 -3.276441
2 V17 T1 L 10.498238 12.427238 -0.212123 2.207155 21.022304 33.709958 7.673534 16.047365 20.188340 -0.093679 2.992685 56.615596 55.442637 2.741746
3 V11 T2 R 7.913196 9.857359 -1.438924 4.422782 10.449421 35.860442 7.300348 19.051853 23.286449 0.069212 2.556587 60.847256 55.875495 0.680842
4 V11 T2 L 7.958603 9.815106 -1.368657 4.341406 10.520284 36.089186 7.400737 18.695940 22.990538 0.059173 2.640564 62.974622 55.797110 1.373035
5 V13 T1 R 12.156369 14.046193 -0.751848 2.459677 16.153497 39.609140 13.472081 21.668242 26.974085 0.103231 2.875700 74.689298 74.596831 -0.450307
6 V17 T2 L 11.992086 14.335027 -0.648876 2.727019 20.433179 43.701991 15.587376 17.995050 22.312795 -0.019567 2.769843 58.561165 61.415199 1.876017
7 V17 T1 R 11.030199 12.974124 -0.353556 2.234523 19.664089 34.625986 13.476657 16.407634 20.874124 -0.021723 3.171323 61.134055 58.394124 1.115155
8 V13 T2 L 13.283742 15.626065 -0.923787 2.826922 17.205495 46.826339 14.208404 13.283742 15.626065 -0.923787 2.826922 17.205495 46.826339 14.208404
9 V13 T1 L 10.849642 12.911247 -1.011653 3.079410 13.918803 39.674545 11.527866 21.104536 26.193603 0.032069 2.879431 70.324724 74.886901 -0.824901
10 V13 T2 R 9.673651 12.161879 -1.451079 4.620403 11.913197 45.538838 9.703917 9.673651 12.161879 -1.451079 4.620403 11.913197 45.538838 9.703917
11 V17 T2 R 13.642760 16.134591 -0.527518 2.425814 22.267575 45.397845 18.612027 20.367282 25.141885 -0.043377 2.692481 66.080730 66.391047 2.614834
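res pairs each polished measurement (the _x columns) with its as-built counterpart (the _y columns). Before fitting anything, it is worth a quick look at how strongly the polished-side measures track the as-built rv (a sketch using res from above):

res[['ra_1d_x', 'rq_1d_x', 'rp_1d_x', 'mode_1d_x', 'rv_1d_y']].corr()['rv_1d_y']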
In [549]:
y_cols = cols = "ra_1d_x rq_1d_x rsk_1d_x rku_1d_x rp_1d_x rv_1d_x mode_1d_x rv_1d_y".split()
tmp = res[y_cols].drop(10)
tmp
Out[549]:
ra_1d_x rq_1d_x rsk_1d_x rku_1d_x rp_1d_x rv_1d_x mode_1d_x rv_1d_y
0 4.527431 6.208072 -2.083207 7.491663 5.929927 28.451027 3.644151 56.415843
1 6.192697 7.930921 -1.656373 5.331377 7.676874 31.480531 5.197160 55.552691
2 10.498238 12.427238 -0.212123 2.207155 21.022304 33.709958 7.673534 55.442637
3 7.913196 9.857359 -1.438924 4.422782 10.449421 35.860442 7.300348 55.875495
4 7.958603 9.815106 -1.368657 4.341406 10.520284 36.089186 7.400737 55.797110
5 12.156369 14.046193 -0.751848 2.459677 16.153497 39.609140 13.472081 74.596831
6 11.992086 14.335027 -0.648876 2.727019 20.433179 43.701991 15.587376 61.415199
7 11.030199 12.974124 -0.353556 2.234523 19.664089 34.625986 13.476657 58.394124
8 13.283742 15.626065 -0.923787 2.826922 17.205495 46.826339 14.208404 46.826339
9 10.849642 12.911247 -1.011653 3.079410 13.918803 39.674545 11.527866 74.886901
11 13.642760 16.134591 -0.527518 2.425814 22.267575 45.397845 18.612027 66.391047
In [522]:
x_cols = 'ra_1d rq_1d rsk_1d rku_1d rp_1d mode_1d'.split()
y_cols = ['rv_1d']
In [524]:
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import Ridge
from sklearn.neural_network import MLPRegressor
from sklearn.preprocessing import PolynomialFeatures
from sklearn.model_selection import train_test_split
x = tmp[x_cols].values
y = tmp[y_cols].values
xtrain, xtest, ytrain, ytest = train_test_split(x, y, test_size=.2)
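Note that the evaluation cell near the top of this section calls scaler.transform, so a feature scaler must have been fit in a cell not captured here; a minimal sketch of that assumed preprocessing step (the scaler and the _s names are hypothetical):

from sklearn.preprocessing import StandardScaler
scaler = StandardScaler().fit(xtrain)                    # fit on training features only
xtrain_s, xtest_s = scaler.transform(xtrain), scaler.transform(xtest)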
In [526]:
import tensorflow as tf
import tensorflow.keras as keras
In [533]:
model = keras.models.Sequential([
    keras.layers.Input([x.shape[-1]]),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(100, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(50, activation='relu'),
    keras.layers.Dropout(.3),
    keras.layers.Dense(1),
])
model.compile(optimizer='adam', loss='mse')
#model = MLPRegressor(2000, )
In [538]:
model.fit(xtrain, ytrain, epochs=10000, validation_split=.1, 
          callbacks=[keras.callbacks.EarlyStopping(patience=1000, restore_best_weights=True)])
Train on 25 samples, validate on 3 samples
Epoch 1/10000
25/25 [==============================] - 0s 1ms/sample - loss: 510.6593 - val_loss: 52.5871
Epoch 2/10000
25/25 [==============================] - 0s 846us/sample - loss: 449.4058 - val_loss: 41.2241
Epoch 3/10000
25/25 [==============================] - 0s 923us/sample - loss: 504.8893 - val_loss: 35.8391
Epoch 4/10000
25/25 [==============================] - 0s 702us/sample - loss: 536.8865 - val_loss: 46.7287
Epoch 5/10000
25/25 [==============================] - 0s 802us/sample - loss: 529.8238 - val_loss: 73.0024
Epoch 6/10000
25/25 [==============================] - 0s 487us/sample - loss: 467.7168 - val_loss: 106.1427
Epoch 7/10000
25/25 [==============================] - 0s 985us/sample - loss: 662.3195 - val_loss: 130.1503
Epoch 8/10000
25/25 [==============================] - 0s 936us/sample - loss: 560.9750 - val_loss: 141.4988
Epoch 9/10000
25/25 [==============================] - 0s 891us/sample - loss: 582.5856 - val_loss: 136.6370
Epoch 10/10000
25/25 [==============================] - 0s 819us/sample - loss: 554.1155 - val_loss: 123.1696
Epoch 11/10000
25/25 [==============================] - 0s 817us/sample - loss: 757.6937 - val_loss: 98.9895
Epoch 12/10000
25/25 [==============================] - 0s 801us/sample - loss: 519.9938 - val_loss: 81.2685
Epoch 13/10000
25/25 [==============================] - 0s 795us/sample - loss: 254.1376 - val_loss: 63.4599
Epoch 14/10000
25/25 [==============================] - 0s 554us/sample - loss: 409.1635 - val_loss: 46.5330
Epoch 15/10000
25/25 [==============================] - 0s 1ms/sample - loss: 377.3172 - val_loss: 39.8049
Epoch 16/10000
25/25 [==============================] - 0s 885us/sample - loss: 405.7477 - val_loss: 34.6928
Epoch 17/10000
25/25 [==============================] - 0s 1ms/sample - loss: 479.2338 - val_loss: 33.0704
Epoch 18/10000
25/25 [==============================] - 0s 714us/sample - loss: 529.2546 - val_loss: 34.5858
Epoch 19/10000
25/25 [==============================] - 0s 1ms/sample - loss: 547.8597 - val_loss: 40.5658
Epoch 20/10000
25/25 [==============================] - 0s 991us/sample - loss: 492.2363 - val_loss: 58.2347
Epoch 21/10000
25/25 [==============================] - 0s 575us/sample - loss: 235.8424 - val_loss: 80.5268
Epoch 22/10000
25/25 [==============================] - 0s 1ms/sample - loss: 444.9171 - val_loss: 111.4245
Epoch 23/10000
25/25 [==============================] - 0s 842us/sample - loss: 302.9728 - val_loss: 138.8114
Epoch 24/10000
25/25 [==============================] - 0s 1ms/sample - loss: 566.3010 - val_loss: 167.3807
Epoch 25/10000
25/25 [==============================] - 0s 1ms/sample - loss: 294.8794 - val_loss: 175.0181
Epoch 26/10000
25/25 [==============================] - 0s 787us/sample - loss: 404.6125 - val_loss: 175.0478
Epoch 27/10000
25/25 [==============================] - 0s 811us/sample - loss: 533.9191 - val_loss: 165.5239
Epoch 28/10000
25/25 [==============================] - 0s 1ms/sample - loss: 267.6744 - val_loss: 148.5801
Epoch 29/10000
25/25 [==============================] - 0s 763us/sample - loss: 444.3852 - val_loss: 127.3991
Epoch 30/10000
25/25 [==============================] - 0s 768us/sample - loss: 241.6251 - val_loss: 100.3179
Epoch 31/10000
25/25 [==============================] - 0s 1ms/sample - loss: 382.3469 - val_loss: 77.8734
Epoch 32/10000
25/25 [==============================] - 0s 946us/sample - loss: 249.1544 - val_loss: 63.4413
Epoch 33/10000
25/25 [==============================] - 0s 885us/sample - loss: 200.9267 - val_loss: 55.5603
Epoch 34/10000
25/25 [==============================] - 0s 958us/sample - loss: 276.9537 - val_loss: 47.3357
Epoch 35/10000
25/25 [==============================] - 0s 1ms/sample - loss: 256.2354 - val_loss: 42.3374
Epoch 36/10000
25/25 [==============================] - 0s 784us/sample - loss: 230.4747 - val_loss: 38.0732
Epoch 37/10000
25/25 [==============================] - 0s 874us/sample - loss: 248.4962 - val_loss: 32.5327
Epoch 38/10000
25/25 [==============================] - 0s 1ms/sample - loss: 190.0742 - val_loss: 26.7439
Epoch 39/10000
25/25 [==============================] - 0s 1ms/sample - loss: 264.8452 - val_loss: 27.7327
Epoch 40/10000
25/25 [==============================] - 0s 776us/sample - loss: 219.6971 - val_loss: 32.9035
Epoch 41/10000
25/25 [==============================] - 0s 586us/sample - loss: 394.7666 - val_loss: 42.5831
Epoch 42/10000
25/25 [==============================] - 0s 829us/sample - loss: 152.8289 - val_loss: 58.5414
Epoch 43/10000
25/25 [==============================] - 0s 1ms/sample - loss: 390.6185 - val_loss: 76.3185
Epoch 44/10000
25/25 [==============================] - 0s 943us/sample - loss: 331.1840 - val_loss: 94.9224
Epoch 45/10000
25/25 [==============================] - 0s 799us/sample - loss: 192.8638 - val_loss: 107.1623
Epoch 46/10000
25/25 [==============================] - 0s 793us/sample - loss: 326.0858 - val_loss: 120.3853
Epoch 47/10000
25/25 [==============================] - 0s 815us/sample - loss: 394.7269 - val_loss: 145.4673
Epoch 48/10000
25/25 [==============================] - 0s 986us/sample - loss: 232.1943 - val_loss: 163.0301
Epoch 49/10000
25/25 [==============================] - 0s 954us/sample - loss: 279.5160 - val_loss: 179.4405
Epoch 50/10000
25/25 [==============================] - 0s 808us/sample - loss: 248.7141 - val_loss: 192.0273
Epoch 51/10000
25/25 [==============================] - 0s 774us/sample - loss: 216.1143 - val_loss: 189.1045
Epoch 52/10000
25/25 [==============================] - 0s 1ms/sample - loss: 280.4554 - val_loss: 175.4241
Epoch 53/10000
25/25 [==============================] - 0s 892us/sample - loss: 365.1361 - val_loss: 150.9656
Epoch 54/10000
25/25 [==============================] - 0s 1ms/sample - loss: 315.4451 - val_loss: 128.7588
Epoch 55/10000
25/25 [==============================] - 0s 702us/sample - loss: 230.0867 - val_loss: 106.2312
Epoch 56/10000
25/25 [==============================] - 0s 861us/sample - loss: 220.1565 - val_loss: 86.2187
Epoch 57/10000
25/25 [==============================] - 0s 816us/sample - loss: 505.9743 - val_loss: 80.2734
Epoch 58/10000
25/25 [==============================] - 0s 757us/sample - loss: 285.8565 - val_loss: 83.3774
Epoch 59/10000
25/25 [==============================] - 0s 928us/sample - loss: 415.0307 - val_loss: 87.8888
Epoch 60/10000
25/25 [==============================] - 0s 927us/sample - loss: 269.2136 - val_loss: 94.2609
Epoch 61/10000
25/25 [==============================] - 0s 874us/sample - loss: 426.3524 - val_loss: 97.1832
Epoch 62/10000
25/25 [==============================] - 0s 755us/sample - loss: 201.0085 - val_loss: 103.9797
Epoch 63/10000
25/25 [==============================] - 0s 1ms/sample - loss: 188.3500 - val_loss: 108.8521
Epoch 64/10000
25/25 [==============================] - 0s 653us/sample - loss: 289.6957 - val_loss: 119.1052
Epoch 65/10000
25/25 [==============================] - 0s 652us/sample - loss: 212.3507 - val_loss: 116.8868
Epoch 66/10000
25/25 [==============================] - 0s 1ms/sample - loss: 336.5113 - val_loss: 115.4229
Epoch 67/10000
25/25 [==============================] - 0s 771us/sample - loss: 349.2253 - val_loss: 119.3735
Epoch 68/10000
25/25 [==============================] - 0s 1ms/sample - loss: 272.2252 - val_loss: 121.7106
Epoch 69/10000
25/25 [==============================] - 0s 862us/sample - loss: 418.6286 - val_loss: 123.5309
Epoch 70/10000
25/25 [==============================] - 0s 1ms/sample - loss: 395.7732 - val_loss: 129.5416
Epoch 71/10000
25/25 [==============================] - 0s 821us/sample - loss: 295.7935 - val_loss: 135.0022
Epoch 72/10000
25/25 [==============================] - 0s 795us/sample - loss: 245.6270 - val_loss: 140.1210
Epoch 73/10000
25/25 [==============================] - 0s 791us/sample - loss: 264.9359 - val_loss: 144.0986
Epoch 74/10000
25/25 [==============================] - 0s 766us/sample - loss: 429.4355 - val_loss: 150.0656
Epoch 75/10000
25/25 [==============================] - 0s 777us/sample - loss: 211.9241 - val_loss: 150.0400
Epoch 76/10000
25/25 [==============================] - 0s 655us/sample - loss: 171.4387 - val_loss: 152.4774
Epoch 77/10000
25/25 [==============================] - 0s 886us/sample - loss: 305.3232 - val_loss: 157.5781
Epoch 78/10000
25/25 [==============================] - 0s 942us/sample - loss: 400.3374 - val_loss: 148.8256
Epoch 79/10000
25/25 [==============================] - 0s 940us/sample - loss: 228.1205 - val_loss: 131.4934
Epoch 80/10000
25/25 [==============================] - 0s 736us/sample - loss: 267.5552 - val_loss: 113.6091
Epoch 81/10000
25/25 [==============================] - 0s 1ms/sample - loss: 268.8904 - val_loss: 98.3038
Epoch 82/10000
25/25 [==============================] - 0s 828us/sample - loss: 241.0607 - val_loss: 81.9391
Epoch 83/10000
25/25 [==============================] - 0s 694us/sample - loss: 275.1093 - val_loss: 66.4114
Epoch 84/10000
25/25 [==============================] - 0s 1ms/sample - loss: 331.3108 - val_loss: 50.7660
Epoch 85/10000
25/25 [==============================] - 0s 777us/sample - loss: 381.9310 - val_loss: 45.9903
Epoch 86/10000
25/25 [==============================] - 0s 703us/sample - loss: 267.1809 - val_loss: 45.2580
Epoch 87/10000
25/25 [==============================] - 0s 523us/sample - loss: 421.5735 - val_loss: 53.7268
Epoch 88/10000
25/25 [==============================] - 0s 984us/sample - loss: 414.7794 - val_loss: 67.8661
Epoch 89/10000
25/25 [==============================] - 0s 908us/sample - loss: 252.3405 - val_loss: 82.7332
Epoch 90/10000
25/25 [==============================] - 0s 739us/sample - loss: 201.8947 - val_loss: 100.5805
Epoch 91/10000
25/25 [==============================] - 0s 1ms/sample - loss: 316.6141 - val_loss: 126.9722
Epoch 92/10000
25/25 [==============================] - 0s 1ms/sample - loss: 280.8840 - val_loss: 159.0546
Epoch 93/10000
25/25 [==============================] - 0s 986us/sample - loss: 186.5206 - val_loss: 188.0639
Epoch 94/10000
25/25 [==============================] - 0s 1ms/sample - loss: 237.3913 - val_loss: 209.5217
Epoch 95/10000
25/25 [==============================] - 0s 723us/sample - loss: 302.7535 - val_loss: 214.3731
Epoch 96/10000
25/25 [==============================] - 0s 634us/sample - loss: 287.5733 - val_loss: 209.4941
Epoch 97/10000
25/25 [==============================] - 0s 935us/sample - loss: 256.8185 - val_loss: 197.8690
Epoch 98/10000
25/25 [==============================] - 0s 849us/sample - loss: 224.6236 - val_loss: 175.8596
Epoch 99/10000
25/25 [==============================] - 0s 602us/sample - loss: 291.6416 - val_loss: 158.3556
Epoch 100/10000
25/25 [==============================] - 0s 726us/sample - loss: 304.6039 - val_loss: 142.7693
Epoch 101/10000
25/25 [==============================] - 0s 1ms/sample - loss: 342.1189 - val_loss: 139.2796
Epoch 102/10000
25/25 [==============================] - 0s 1ms/sample - loss: 132.9488 - val_loss: 139.1688
Epoch 103/10000
25/25 [==============================] - 0s 551us/sample - loss: 227.6767 - val_loss: 131.5389
Epoch 104/10000
25/25 [==============================] - 0s 1ms/sample - loss: 307.5635 - val_loss: 128.3782
Epoch 105/10000
25/25 [==============================] - 0s 837us/sample - loss: 278.2760 - val_loss: 120.7978
Epoch 106/10000
25/25 [==============================] - 0s 793us/sample - loss: 103.3830 - val_loss: 116.4252
Epoch 107/10000
25/25 [==============================] - 0s 1ms/sample - loss: 267.6824 - val_loss: 106.4050
Epoch 108/10000
25/25 [==============================] - 0s 525us/sample - loss: 302.6366 - val_loss: 88.8650
Epoch 109/10000
25/25 [==============================] - 0s 868us/sample - loss: 196.1115 - val_loss: 73.0878
Epoch 110/10000
25/25 [==============================] - 0s 1ms/sample - loss: 231.4885 - val_loss: 62.3743
Epoch 111/10000
25/25 [==============================] - 0s 775us/sample - loss: 211.9237 - val_loss: 50.1107
Epoch 112/10000
25/25 [==============================] - 0s 971us/sample - loss: 370.0143 - val_loss: 53.2517
Epoch 113/10000
25/25 [==============================] - 0s 859us/sample - loss: 318.1557 - val_loss: 61.2581
Epoch 114/10000
25/25 [==============================] - 0s 1ms/sample - loss: 242.4240 - val_loss: 70.5729
Epoch 115/10000
25/25 [==============================] - 0s 710us/sample - loss: 181.0216 - val_loss: 79.2620
Epoch 116/10000
25/25 [==============================] - 0s 1ms/sample - loss: 340.0545 - val_loss: 93.5948
Epoch 117/10000
25/25 [==============================] - 0s 828us/sample - loss: 188.3890 - val_loss: 119.4044
Epoch 118/10000
25/25 [==============================] - 0s 803us/sample - loss: 317.5847 - val_loss: 140.2824
Epoch 119/10000
25/25 [==============================] - 0s 646us/sample - loss: 152.4673 - val_loss: 150.2319
Epoch 120/10000
25/25 [==============================] - 0s 1ms/sample - loss: 287.1626 - val_loss: 160.6047
Epoch 121/10000
25/25 [==============================] - 0s 847us/sample - loss: 212.5976 - val_loss: 163.0354
Epoch 122/10000
25/25 [==============================] - 0s 719us/sample - loss: 336.3336 - val_loss: 154.6594
Epoch 123/10000
25/25 [==============================] - 0s 1ms/sample - loss: 250.2331 - val_loss: 142.3865
Epoch 124/10000
25/25 [==============================] - 0s 1ms/sample - loss: 161.0859 - val_loss: 140.1524
Epoch 125/10000
25/25 [==============================] - 0s 524us/sample - loss: 187.1780 - val_loss: 140.5323
Epoch 126/10000
25/25 [==============================] - 0s 1ms/sample - loss: 269.1674 - val_loss: 149.5015
Epoch 127/10000
25/25 [==============================] - 0s 1ms/sample - loss: 356.1123 - val_loss: 161.2009
Epoch 128/10000
25/25 [==============================] - 0s 823us/sample - loss: 371.4528 - val_loss: 182.2332
Epoch 129/10000
25/25 [==============================] - 0s 1ms/sample - loss: 291.3275 - val_loss: 198.5972
Epoch 130/10000
25/25 [==============================] - 0s 770us/sample - loss: 249.5260 - val_loss: 214.6354
Epoch 131/10000
25/25 [==============================] - 0s 1ms/sample - loss: 190.1629 - val_loss: 225.0683
Epoch 132/10000
25/25 [==============================] - 0s 705us/sample - loss: 367.0180 - val_loss: 215.4086
Epoch 133/10000
25/25 [==============================] - 0s 852us/sample - loss: 349.1516 - val_loss: 206.3945
Epoch 134/10000
25/25 [==============================] - 0s 688us/sample - loss: 205.5464 - val_loss: 188.9708
Epoch 135/10000
25/25 [==============================] - 0s 656us/sample - loss: 167.8899 - val_loss: 175.8071
Epoch 136/10000
25/25 [==============================] - 0s 996us/sample - loss: 180.8443 - val_loss: 166.9823
Epoch 137/10000
25/25 [==============================] - 0s 637us/sample - loss: 207.5040 - val_loss: 159.7324
Epoch 138/10000
25/25 [==============================] - 0s 670us/sample - loss: 103.7769 - val_loss: 153.4237
Epoch 139/10000
25/25 [==============================] - 0s 721us/sample - loss: 214.5879 - val_loss: 136.9119
Epoch 140/10000
25/25 [==============================] - 0s 795us/sample - loss: 373.4750 - val_loss: 136.4146
Epoch 141/10000
25/25 [==============================] - 0s 819us/sample - loss: 279.2855 - val_loss: 134.8769
Epoch 142/10000
25/25 [==============================] - 0s 985us/sample - loss: 284.1123 - val_loss: 137.6205
Epoch 143/10000
25/25 [==============================] - 0s 956us/sample - loss: 221.9161 - val_loss: 136.4382
Epoch 144/10000
25/25 [==============================] - 0s 985us/sample - loss: 250.9156 - val_loss: 142.5208
Epoch 145/10000
25/25 [==============================] - 0s 1ms/sample - loss: 340.1649 - val_loss: 153.8358
Epoch 146/10000
25/25 [==============================] - 0s 801us/sample - loss: 190.6250 - val_loss: 173.2055
Epoch 147/10000
25/25 [==============================] - 0s 954us/sample - loss: 363.9020 - val_loss: 215.4837
Epoch 148/10000
25/25 [==============================] - 0s 877us/sample - loss: 433.4553 - val_loss: 246.6305
Epoch 149/10000
25/25 [==============================] - 0s 1ms/sample - loss: 218.0494 - val_loss: 263.1883
Epoch 150/10000
25/25 [==============================] - 0s 1ms/sample - loss: 272.7433 - val_loss: 272.3083
Epoch 151/10000
25/25 [==============================] - 0s 840us/sample - loss: 249.4351 - val_loss: 274.2694
Epoch 152/10000
25/25 [==============================] - 0s 743us/sample - loss: 168.6934 - val_loss: 272.8271
Epoch 153/10000
25/25 [==============================] - 0s 768us/sample - loss: 312.1000 - val_loss: 260.0036
Epoch 154/10000
25/25 [==============================] - 0s 524us/sample - loss: 218.7669 - val_loss: 238.3474
Epoch 155/10000
25/25 [==============================] - 0s 719us/sample - loss: 321.8514 - val_loss: 217.9821
Epoch 156/10000
25/25 [==============================] - 0s 725us/sample - loss: 194.3087 - val_loss: 203.1341
Epoch 157/10000
25/25 [==============================] - 0s 969us/sample - loss: 192.3757 - val_loss: 171.7623
Epoch 158/10000
25/25 [==============================] - 0s 864us/sample - loss: 302.0200 - val_loss: 143.6726
Epoch 159/10000
25/25 [==============================] - 0s 745us/sample - loss: 253.0474 - val_loss: 129.2464
Epoch 160/10000
25/25 [==============================] - 0s 908us/sample - loss: 244.7717 - val_loss: 119.8293
Epoch 161/10000
25/25 [==============================] - 0s 754us/sample - loss: 300.5277 - val_loss: 131.4269
Epoch 162/10000
25/25 [==============================] - 0s 876us/sample - loss: 403.4374 - val_loss: 162.7486
Epoch 163/10000
25/25 [==============================] - 0s 1ms/sample - loss: 262.7087 - val_loss: 196.5222
Epoch 164/10000
25/25 [==============================] - 0s 850us/sample - loss: 353.5857 - val_loss: 242.6142
Epoch 165/10000
25/25 [==============================] - 0s 1ms/sample - loss: 244.5017 - val_loss: 277.8515
Epoch 166/10000
25/25 [==============================] - 0s 990us/sample - loss: 142.3539 - val_loss: 297.3828
Epoch 167/10000
25/25 [==============================] - 0s 817us/sample - loss: 286.2553 - val_loss: 304.9148
Epoch 168/10000
25/25 [==============================] - 0s 812us/sample - loss: 228.8812 - val_loss: 306.6856
Epoch 169/10000
25/25 [==============================] - 0s 907us/sample - loss: 319.1642 - val_loss: 291.3722
Epoch 170/10000
25/25 [==============================] - 0s 545us/sample - loss: 292.1414 - val_loss: 279.5737
Epoch 171/10000
25/25 [==============================] - 0s 827us/sample - loss: 310.6943 - val_loss: 258.5253
Epoch 172/10000
25/25 [==============================] - 0s 1ms/sample - loss: 208.3287 - val_loss: 228.2663
Epoch 173/10000
25/25 [==============================] - 0s 820us/sample - loss: 391.8676 - val_loss: 199.2330
Epoch 174/10000
25/25 [==============================] - 0s 1ms/sample - loss: 226.0730 - val_loss: 170.4929
Epoch 175/10000
25/25 [==============================] - 0s 759us/sample - loss: 210.3509 - val_loss: 154.1075
Epoch 176/10000
25/25 [==============================] - 0s 964us/sample - loss: 302.0701 - val_loss: 153.3965
Epoch 177/10000
25/25 [==============================] - 0s 892us/sample - loss: 341.6342 - val_loss: 166.0634
Epoch 178/10000
25/25 [==============================] - 0s 1ms/sample - loss: 381.5676 - val_loss: 187.0503
Epoch 179/10000
25/25 [==============================] - 0s 499us/sample - loss: 416.5232 - val_loss: 218.8345
Epoch 180/10000
25/25 [==============================] - 0s 822us/sample - loss: 191.2588 - val_loss: 248.1087
Epoch 181/10000
25/25 [==============================] - 0s 783us/sample - loss: 215.7060 - val_loss: 272.7453
Epoch 182/10000
25/25 [==============================] - 0s 835us/sample - loss: 314.6265 - val_loss: 284.7301
Epoch 183/10000
25/25 [==============================] - 0s 853us/sample - loss: 217.4637 - val_loss: 293.8442
Epoch 184/10000
25/25 [==============================] - 0s 920us/sample - loss: 181.3484 - val_loss: 288.1010
Epoch 185/10000
25/25 [==============================] - 0s 765us/sample - loss: 301.1964 - val_loss: 263.8606
Epoch 186/10000
25/25 [==============================] - 0s 692us/sample - loss: 297.7390 - val_loss: 242.8103
Epoch 187/10000
25/25 [==============================] - 0s 905us/sample - loss: 193.0248 - val_loss: 223.2028
Epoch 188/10000
25/25 [==============================] - 0s 947us/sample - loss: 192.7098 - val_loss: 206.3343
Epoch 189/10000
25/25 [==============================] - 0s 1ms/sample - loss: 187.3998 - val_loss: 191.5614
Epoch 190/10000
25/25 [==============================] - 0s 963us/sample - loss: 142.1605 - val_loss: 180.0210
Epoch 191/10000
25/25 [==============================] - 0s 615us/sample - loss: 230.4625 - val_loss: 170.9611
Epoch 192/10000
25/25 [==============================] - 0s 721us/sample - loss: 165.8312 - val_loss: 170.1348
Epoch 193/10000
25/25 [==============================] - 0s 574us/sample - loss: 148.4022 - val_loss: 174.4432
Epoch 194/10000
25/25 [==============================] - 0s 965us/sample - loss: 314.2549 - val_loss: 187.5234
Epoch 195/10000
25/25 [==============================] - 0s 869us/sample - loss: 217.2260 - val_loss: 191.3672
Epoch 196/10000
25/25 [==============================] - 0s 862us/sample - loss: 310.7664 - val_loss: 199.3505
Epoch 197/10000
25/25 [==============================] - 0s 874us/sample - loss: 196.3538 - val_loss: 203.8687
Epoch 198/10000
25/25 [==============================] - 0s 909us/sample - loss: 226.9357 - val_loss: 217.7499
Epoch 199/10000
25/25 [==============================] - 0s 680us/sample - loss: 348.6064 - val_loss: 245.6457
Epoch 200/10000
25/25 [==============================] - 0s 778us/sample - loss: 207.2242 - val_loss: 254.8980
Epoch 201/10000
25/25 [==============================] - 0s 837us/sample - loss: 136.4334 - val_loss: 258.2260
Epoch 202/10000
25/25 [==============================] - 0s 1ms/sample - loss: 153.8549 - val_loss: 257.4438
Epoch 203/10000
25/25 [==============================] - 0s 1ms/sample - loss: 347.7214 - val_loss: 266.7344
Epoch 204/10000
25/25 [==============================] - 0s 777us/sample - loss: 222.7590 - val_loss: 268.3335
Epoch 205/10000
25/25 [==============================] - 0s 718us/sample - loss: 182.3817 - val_loss: 272.0286
Epoch 206/10000
25/25 [==============================] - 0s 964us/sample - loss: 281.4375 - val_loss: 267.0981
Epoch 207/10000
25/25 [==============================] - 0s 757us/sample - loss: 349.1814 - val_loss: 260.3995
Epoch 208/10000
25/25 [==============================] - 0s 874us/sample - loss: 132.0408 - val_loss: 244.0349
Epoch 209/10000
25/25 [==============================] - 0s 492us/sample - loss: 154.9497 - val_loss: 228.5013
Epoch 210/10000
25/25 [==============================] - 0s 785us/sample - loss: 239.6936 - val_loss: 214.8601
Epoch 211/10000
25/25 [==============================] - 0s 889us/sample - loss: 311.0837 - val_loss: 210.8745
Epoch 212/10000
25/25 [==============================] - 0s 889us/sample - loss: 181.9344 - val_loss: 196.9042
Epoch 213/10000
25/25 [==============================] - 0s 1ms/sample - loss: 216.1535 - val_loss: 191.6915
Epoch 214/10000
25/25 [==============================] - 0s 1ms/sample - loss: 280.1620 - val_loss: 201.5864
Epoch 215/10000
25/25 [==============================] - 0s 1ms/sample - loss: 283.1704 - val_loss: 212.0110
Epoch 216/10000
25/25 [==============================] - 0s 913us/sample - loss: 470.0294 - val_loss: 251.6776
Epoch 217/10000
25/25 [==============================] - 0s 1ms/sample - loss: 369.9037 - val_loss: 285.8123
Epoch 218/10000
25/25 [==============================] - 0s 603us/sample - loss: 161.7079 - val_loss: 311.3221
Epoch 219/10000
25/25 [==============================] - 0s 952us/sample - loss: 283.9710 - val_loss: 328.0943
Epoch 220/10000
25/25 [==============================] - 0s 910us/sample - loss: 218.1950 - val_loss: 332.6554
Epoch 221/10000
25/25 [==============================] - 0s 789us/sample - loss: 192.0952 - val_loss: 329.2975
Epoch 222/10000
25/25 [==============================] - 0s 625us/sample - loss: 230.4174 - val_loss: 317.2012
Epoch 223/10000
25/25 [==============================] - 0s 756us/sample - loss: 189.9768 - val_loss: 292.6939
Epoch 224/10000
25/25 [==============================] - 0s 900us/sample - loss: 324.8716 - val_loss: 273.5047
Epoch 225/10000
25/25 [==============================] - 0s 1ms/sample - loss: 149.2479 - val_loss: 252.7173
Epoch 226/10000
25/25 [==============================] - 0s 845us/sample - loss: 423.7773 - val_loss: 238.0430
Epoch 227/10000
25/25 [==============================] - 0s 846us/sample - loss: 184.0783 - val_loss: 227.5690
Epoch 228/10000
25/25 [==============================] - 0s 976us/sample - loss: 140.0675 - val_loss: 218.1758
Epoch 229/10000
25/25 [==============================] - 0s 1ms/sample - loss: 217.3365 - val_loss: 202.6984
Epoch 230/10000
25/25 [==============================] - 0s 688us/sample - loss: 377.8656 - val_loss: 195.4953
Epoch 231/10000
25/25 [==============================] - 0s 704us/sample - loss: 230.7991 - val_loss: 190.9717
Epoch 232/10000
25/25 [==============================] - 0s 859us/sample - loss: 216.1563 - val_loss: 187.1995
Epoch 233/10000
25/25 [==============================] - 0s 1ms/sample - loss: 255.8945 - val_loss: 184.5138
Epoch 234/10000
25/25 [==============================] - 0s 811us/sample - loss: 199.6990 - val_loss: 199.1376
Epoch 235/10000
25/25 [==============================] - 0s 822us/sample - loss: 162.8794 - val_loss: 224.5836
Epoch 236/10000
25/25 [==============================] - 0s 940us/sample - loss: 240.1490 - val_loss: 242.9671
Epoch 237/10000
25/25 [==============================] - 0s 960us/sample - loss: 165.5179 - val_loss: 267.4383
Epoch 238/10000
25/25 [==============================] - 0s 854us/sample - loss: 371.8889 - val_loss: 298.3952
Epoch 239/10000
25/25 [==============================] - 0s 943us/sample - loss: 292.7760 - val_loss: 333.6524
Epoch 240/10000
25/25 [==============================] - 0s 983us/sample - loss: 160.0276 - val_loss: 376.2006
Epoch 241/10000
25/25 [==============================] - 0s 950us/sample - loss: 273.5565 - val_loss: 390.9926
Epoch 242/10000
25/25 [==============================] - 0s 890us/sample - loss: 341.2338 - val_loss: 393.8147
Epoch 243/10000
25/25 [==============================] - 0s 875us/sample - loss: 146.6403 - val_loss: 389.6893
Epoch 244/10000
25/25 [==============================] - 0s 754us/sample - loss: 240.1601 - val_loss: 372.6445
Epoch 245/10000
25/25 [==============================] - 0s 960us/sample - loss: 222.8724 - val_loss: 349.3505
Epoch 246/10000
25/25 [==============================] - 0s 805us/sample - loss: 256.8157 - val_loss: 324.3238
Epoch 247/10000
25/25 [==============================] - 0s 1ms/sample - loss: 250.4807 - val_loss: 296.0397
Epoch 248/10000
25/25 [==============================] - 0s 1ms/sample - loss: 223.5907 - val_loss: 269.7204
Epoch 249/10000
25/25 [==============================] - 0s 1ms/sample - loss: 135.7577 - val_loss: 239.3360
Epoch 250/10000
25/25 [==============================] - 0s 580us/sample - loss: 247.6579 - val_loss: 216.9288
Epoch 251/10000
25/25 [==============================] - 0s 830us/sample - loss: 247.9908 - val_loss: 197.2715
Epoch 252/10000
25/25 [==============================] - 0s 1ms/sample - loss: 223.0811 - val_loss: 183.8962
Epoch 253/10000
25/25 [==============================] - 0s 1ms/sample - loss: 280.0765 - val_loss: 184.9895
Epoch 254/10000
25/25 [==============================] - 0s 676us/sample - loss: 224.9159 - val_loss: 198.0872
Epoch 255/10000
25/25 [==============================] - 0s 589us/sample - loss: 152.5535 - val_loss: 211.7104
Epoch 256/10000
25/25 [==============================] - 0s 1000us/sample - loss: 175.8995 - val_loss: 234.6216
Epoch 257/10000
25/25 [==============================] - 0s 894us/sample - loss: 280.3788 - val_loss: 289.7213
Epoch 258/10000
25/25 [==============================] - 0s 814us/sample - loss: 122.3284 - val_loss: 341.8401
Epoch 259/10000
25/25 [==============================] - 0s 1ms/sample - loss: 322.8482 - val_loss: 389.1419
Epoch 260/10000
25/25 [==============================] - 0s 717us/sample - loss: 246.6528 - val_loss: 413.2961
Epoch 261/10000
25/25 [==============================] - 0s 993us/sample - loss: 285.7378 - val_loss: 422.5608
Epoch 262/10000
25/25 [==============================] - 0s 831us/sample - loss: 180.9164 - val_loss: 424.8636
Epoch 263/10000
25/25 [==============================] - 0s 806us/sample - loss: 222.0101 - val_loss: 423.7033
Epoch 264/10000
25/25 [==============================] - 0s 816us/sample - loss: 171.3756 - val_loss: 416.4186
Epoch 265/10000
25/25 [==============================] - 0s 937us/sample - loss: 119.4095 - val_loss: 399.2116
Epoch 266/10000
25/25 [==============================] - 0s 699us/sample - loss: 153.8596 - val_loss: 374.3734
Epoch 267/10000
25/25 [==============================] - 0s 1ms/sample - loss: 191.1808 - val_loss: 351.4279
Epoch 268/10000
25/25 [==============================] - 0s 1ms/sample - loss: 201.3554 - val_loss: 321.8201
Epoch 269/10000
25/25 [==============================] - 0s 857us/sample - loss: 103.8650 - val_loss: 297.5337
Epoch 270/10000
25/25 [==============================] - 0s 690us/sample - loss: 191.8777 - val_loss: 288.9707
Epoch 271/10000
25/25 [==============================] - 0s 916us/sample - loss: 194.0412 - val_loss: 284.0340
Epoch 272/10000
25/25 [==============================] - 0s 1ms/sample - loss: 240.3121 - val_loss: 283.4879
Epoch 273/10000
25/25 [==============================] - 0s 922us/sample - loss: 176.3884 - val_loss: 285.6446
Epoch 274/10000
25/25 [==============================] - 0s 813us/sample - loss: 106.9879 - val_loss: 283.5892
Epoch 275/10000
25/25 [==============================] - 0s 861us/sample - loss: 181.4545 - val_loss: 262.8326
Epoch 276/10000
25/25 [==============================] - 0s 843us/sample - loss: 274.0455 - val_loss: 256.3073
Epoch 277/10000
25/25 [==============================] - 0s 824us/sample - loss: 327.0611 - val_loss: 267.3087
Epoch 278/10000
25/25 [==============================] - 0s 789us/sample - loss: 210.6613 - val_loss: 270.2568
Epoch 279/10000
25/25 [==============================] - 0s 1ms/sample - loss: 175.5625 - val_loss: 279.9272
Epoch 280/10000
25/25 [==============================] - 0s 839us/sample - loss: 161.7829 - val_loss: 285.1411
Epoch 281/10000
25/25 [==============================] - 0s 981us/sample - loss: 234.9559 - val_loss: 300.0031
Epoch 282/10000
25/25 [==============================] - 0s 872us/sample - loss: 221.1684 - val_loss: 323.6173
Epoch 283/10000
25/25 [==============================] - 0s 923us/sample - loss: 104.8243 - val_loss: 339.0270
Epoch 284/10000
25/25 [==============================] - 0s 927us/sample - loss: 229.7001 - val_loss: 349.0506
Epoch 285/10000
25/25 [==============================] - 0s 926us/sample - loss: 148.6116 - val_loss: 359.4600
Epoch 286/10000
25/25 [==============================] - 0s 545us/sample - loss: 185.0594 - val_loss: 360.1834
Epoch 287/10000
25/25 [==============================] - 0s 833us/sample - loss: 164.7932 - val_loss: 363.1780
Epoch 288/10000
25/25 [==============================] - 0s 575us/sample - loss: 190.5636 - val_loss: 360.2660
Epoch 289/10000
25/25 [==============================] - 0s 715us/sample - loss: 208.1715 - val_loss: 354.3349
Epoch 290/10000
25/25 [==============================] - 0s 815us/sample - loss: 185.1061 - val_loss: 337.9784
Epoch 291/10000
25/25 [==============================] - 0s 1ms/sample - loss: 191.8911 - val_loss: 334.8330
Epoch 292/10000
25/25 [==============================] - 0s 1ms/sample - loss: 196.9815 - val_loss: 329.4445
Epoch 293/10000
25/25 [==============================] - 0s 775us/sample - loss: 218.9750 - val_loss: 320.8069
Epoch 294/10000
25/25 [==============================] - 0s 619us/sample - loss: 230.7654 - val_loss: 305.5363
Epoch 295/10000
25/25 [==============================] - 0s 748us/sample - loss: 189.3613 - val_loss: 292.8530
Epoch 296/10000
25/25 [==============================] - 0s 909us/sample - loss: 154.1034 - val_loss: 285.3998
Epoch 297/10000
25/25 [==============================] - 0s 740us/sample - loss: 153.3157 - val_loss: 282.2498
Epoch 298/10000
25/25 [==============================] - 0s 1ms/sample - loss: 298.6111 - val_loss: 277.3276
Epoch 299/10000
25/25 [==============================] - 0s 883us/sample - loss: 286.5737 - val_loss: 272.5565
Epoch 300/10000
25/25 [==============================] - 0s 1ms/sample - loss: 183.2107 - val_loss: 263.2785
[... epochs 301-878 omitted: training loss fluctuates between roughly 66 and 387 with no sustained downward trend, while val_loss oscillates between roughly 198 and 600 (best in this span: 197.5936 at epoch 459) instead of converging ...]
Epoch 879/10000
25/25 [==============================] - 0s 798us/sample - loss: 88.5358 - val_loss: 324.8984
Epoch 880/10000
25/25 [==============================] - 0s 864us/sample - loss: 244.3555 - val_loss: 339.3567
Epoch 881/10000
25/25 [==============================] - 0s 667us/sample - loss: 285.5625 - val_loss: 356.6549
Epoch 882/10000
25/25 [==============================] - 0s 827us/sample - loss: 113.8458 - val_loss: 375.5798
Epoch 883/10000
25/25 [==============================] - 0s 865us/sample - loss: 180.9166 - val_loss: 408.3509
Epoch 884/10000
25/25 [==============================] - 0s 1ms/sample - loss: 130.6286 - val_loss: 454.4712
Epoch 885/10000
25/25 [==============================] - 0s 1ms/sample - loss: 125.8251 - val_loss: 495.1239
Epoch 886/10000
25/25 [==============================] - 0s 870us/sample - loss: 171.8646 - val_loss: 542.6395
Epoch 887/10000
25/25 [==============================] - 0s 636us/sample - loss: 145.6663 - val_loss: 562.5829
Epoch 888/10000
25/25 [==============================] - 0s 1ms/sample - loss: 228.6656 - val_loss: 561.7415
Epoch 889/10000
25/25 [==============================] - 0s 821us/sample - loss: 138.5529 - val_loss: 550.1630
Epoch 890/10000
25/25 [==============================] - 0s 792us/sample - loss: 200.5323 - val_loss: 539.2888
Epoch 891/10000
25/25 [==============================] - 0s 861us/sample - loss: 201.0224 - val_loss: 510.0996
Epoch 892/10000
25/25 [==============================] - 0s 1ms/sample - loss: 210.8832 - val_loss: 468.1649
Epoch 893/10000
25/25 [==============================] - 0s 690us/sample - loss: 128.6393 - val_loss: 424.9847
Epoch 894/10000
25/25 [==============================] - 0s 738us/sample - loss: 181.8834 - val_loss: 379.6498
Epoch 895/10000
25/25 [==============================] - 0s 942us/sample - loss: 83.4929 - val_loss: 353.0858
Epoch 896/10000
25/25 [==============================] - 0s 903us/sample - loss: 152.5502 - val_loss: 338.8690
Epoch 897/10000
25/25 [==============================] - 0s 802us/sample - loss: 189.0492 - val_loss: 337.6459
Epoch 898/10000
25/25 [==============================] - 0s 815us/sample - loss: 204.1585 - val_loss: 335.1909
Epoch 899/10000
25/25 [==============================] - 0s 971us/sample - loss: 145.7317 - val_loss: 332.0682
Epoch 900/10000
25/25 [==============================] - 0s 966us/sample - loss: 253.8256 - val_loss: 348.0193
Epoch 901/10000
25/25 [==============================] - 0s 1ms/sample - loss: 101.2497 - val_loss: 358.5764
Epoch 902/10000
25/25 [==============================] - 0s 769us/sample - loss: 87.6936 - val_loss: 382.1677
Epoch 903/10000
25/25 [==============================] - 0s 1ms/sample - loss: 121.5437 - val_loss: 411.7269
Epoch 904/10000
25/25 [==============================] - 0s 889us/sample - loss: 138.6208 - val_loss: 438.0144
Epoch 905/10000
25/25 [==============================] - 0s 939us/sample - loss: 271.0219 - val_loss: 468.2881
Epoch 906/10000
25/25 [==============================] - 0s 891us/sample - loss: 157.6295 - val_loss: 480.8263
Epoch 907/10000
25/25 [==============================] - 0s 1ms/sample - loss: 159.8832 - val_loss: 476.9828
Epoch 908/10000
25/25 [==============================] - 0s 718us/sample - loss: 150.1392 - val_loss: 460.6414
Epoch 909/10000
25/25 [==============================] - 0s 936us/sample - loss: 182.0307 - val_loss: 430.9022
Epoch 910/10000
25/25 [==============================] - 0s 1ms/sample - loss: 218.8333 - val_loss: 409.4240
Epoch 911/10000
25/25 [==============================] - 0s 960us/sample - loss: 108.0853 - val_loss: 392.3433
Epoch 912/10000
25/25 [==============================] - 0s 1ms/sample - loss: 143.3065 - val_loss: 367.1907
Epoch 913/10000
25/25 [==============================] - 0s 819us/sample - loss: 169.8886 - val_loss: 351.1444
Epoch 914/10000
25/25 [==============================] - 0s 850us/sample - loss: 56.5034 - val_loss: 324.4037
Epoch 915/10000
25/25 [==============================] - 0s 953us/sample - loss: 153.7061 - val_loss: 296.1466
Epoch 916/10000
25/25 [==============================] - 0s 1ms/sample - loss: 159.3970 - val_loss: 286.4192
Epoch 917/10000
25/25 [==============================] - 0s 820us/sample - loss: 220.9518 - val_loss: 302.4830
Epoch 918/10000
25/25 [==============================] - 0s 666us/sample - loss: 174.1902 - val_loss: 318.5663
Epoch 919/10000
25/25 [==============================] - 0s 860us/sample - loss: 185.0200 - val_loss: 346.5466
Epoch 920/10000
25/25 [==============================] - 0s 814us/sample - loss: 210.6610 - val_loss: 380.4120
Epoch 921/10000
25/25 [==============================] - 0s 824us/sample - loss: 153.5572 - val_loss: 420.4985
Epoch 922/10000
25/25 [==============================] - 0s 636us/sample - loss: 213.0471 - val_loss: 459.6301
Epoch 923/10000
25/25 [==============================] - 0s 1ms/sample - loss: 161.5550 - val_loss: 483.4120
Epoch 924/10000
25/25 [==============================] - 0s 733us/sample - loss: 220.0615 - val_loss: 505.5623
Epoch 925/10000
25/25 [==============================] - 0s 968us/sample - loss: 134.3380 - val_loss: 508.3923
Epoch 926/10000
25/25 [==============================] - 0s 971us/sample - loss: 128.3575 - val_loss: 491.8377
Epoch 927/10000
25/25 [==============================] - 0s 849us/sample - loss: 144.3806 - val_loss: 461.8843
Epoch 928/10000
25/25 [==============================] - 0s 796us/sample - loss: 151.8369 - val_loss: 428.5409
Epoch 929/10000
25/25 [==============================] - 0s 1ms/sample - loss: 196.6901 - val_loss: 379.3365
Epoch 930/10000
25/25 [==============================] - 0s 810us/sample - loss: 111.3516 - val_loss: 344.3561
Epoch 931/10000
25/25 [==============================] - 0s 821us/sample - loss: 164.5419 - val_loss: 325.5129
Epoch 932/10000
25/25 [==============================] - 0s 821us/sample - loss: 160.2898 - val_loss: 317.7495
Epoch 933/10000
25/25 [==============================] - 0s 817us/sample - loss: 105.4631 - val_loss: 320.4717
Epoch 934/10000
25/25 [==============================] - 0s 921us/sample - loss: 122.0184 - val_loss: 315.6970
Epoch 935/10000
25/25 [==============================] - 0s 826us/sample - loss: 170.2099 - val_loss: 325.6648
Epoch 936/10000
25/25 [==============================] - 0s 829us/sample - loss: 128.6911 - val_loss: 356.8109
Epoch 937/10000
25/25 [==============================] - 0s 1ms/sample - loss: 176.1083 - val_loss: 391.6549
Epoch 938/10000
25/25 [==============================] - 0s 525us/sample - loss: 133.7189 - val_loss: 423.4845
Epoch 939/10000
25/25 [==============================] - 0s 948us/sample - loss: 160.0949 - val_loss: 432.1372
Epoch 940/10000
25/25 [==============================] - 0s 822us/sample - loss: 162.8670 - val_loss: 441.6847
Epoch 941/10000
25/25 [==============================] - 0s 883us/sample - loss: 334.3146 - val_loss: 444.1813
Epoch 942/10000
25/25 [==============================] - 0s 1ms/sample - loss: 260.0139 - val_loss: 443.3225
Epoch 943/10000
25/25 [==============================] - 0s 697us/sample - loss: 154.8405 - val_loss: 439.7831
Epoch 944/10000
25/25 [==============================] - 0s 543us/sample - loss: 220.7908 - val_loss: 440.1190
Epoch 945/10000
25/25 [==============================] - 0s 831us/sample - loss: 105.5938 - val_loss: 431.0271
Epoch 946/10000
25/25 [==============================] - 0s 555us/sample - loss: 79.6533 - val_loss: 425.3790
Epoch 947/10000
25/25 [==============================] - 0s 841us/sample - loss: 133.4879 - val_loss: 414.6597
Epoch 948/10000
25/25 [==============================] - 0s 861us/sample - loss: 99.7362 - val_loss: 425.1254
Epoch 949/10000
25/25 [==============================] - 0s 833us/sample - loss: 179.8160 - val_loss: 445.5087
Epoch 950/10000
25/25 [==============================] - 0s 1ms/sample - loss: 128.9588 - val_loss: 456.0387
Epoch 951/10000
25/25 [==============================] - 0s 1ms/sample - loss: 226.7733 - val_loss: 481.1345
Epoch 952/10000
25/25 [==============================] - 0s 984us/sample - loss: 188.1989 - val_loss: 497.6593
Epoch 953/10000
25/25 [==============================] - 0s 859us/sample - loss: 92.4878 - val_loss: 516.2575
Epoch 954/10000
25/25 [==============================] - 0s 609us/sample - loss: 231.6009 - val_loss: 517.1135
Epoch 955/10000
25/25 [==============================] - 0s 992us/sample - loss: 129.6619 - val_loss: 516.9460
Epoch 956/10000
25/25 [==============================] - 0s 945us/sample - loss: 154.2822 - val_loss: 512.0306
Epoch 957/10000
25/25 [==============================] - 0s 1ms/sample - loss: 200.5724 - val_loss: 505.8926
Epoch 958/10000
25/25 [==============================] - 0s 955us/sample - loss: 136.1036 - val_loss: 512.4005
Epoch 959/10000
25/25 [==============================] - 0s 813us/sample - loss: 175.2997 - val_loss: 509.6732
Epoch 960/10000
25/25 [==============================] - 0s 792us/sample - loss: 179.9346 - val_loss: 498.6477
Epoch 961/10000
25/25 [==============================] - 0s 785us/sample - loss: 160.1772 - val_loss: 477.3753
Epoch 962/10000
25/25 [==============================] - 0s 783us/sample - loss: 144.6837 - val_loss: 452.3873
Epoch 963/10000
25/25 [==============================] - 0s 1ms/sample - loss: 126.9760 - val_loss: 424.0326
Epoch 964/10000
25/25 [==============================] - 0s 968us/sample - loss: 127.3668 - val_loss: 401.6467
Epoch 965/10000
25/25 [==============================] - 0s 1ms/sample - loss: 252.6135 - val_loss: 398.2215
Epoch 966/10000
25/25 [==============================] - 0s 616us/sample - loss: 173.6171 - val_loss: 392.6938
Epoch 967/10000
25/25 [==============================] - 0s 889us/sample - loss: 300.7928 - val_loss: 387.5185
Epoch 968/10000
25/25 [==============================] - 0s 880us/sample - loss: 130.9747 - val_loss: 372.1299
Epoch 969/10000
25/25 [==============================] - 0s 774us/sample - loss: 77.8947 - val_loss: 353.8611
Epoch 970/10000
25/25 [==============================] - 0s 963us/sample - loss: 100.1141 - val_loss: 336.1671
Epoch 971/10000
25/25 [==============================] - 0s 1ms/sample - loss: 76.3174 - val_loss: 328.2490
Epoch 972/10000
25/25 [==============================] - 0s 868us/sample - loss: 122.2788 - val_loss: 331.7647
Epoch 973/10000
25/25 [==============================] - 0s 1ms/sample - loss: 173.1505 - val_loss: 348.4882
Epoch 974/10000
25/25 [==============================] - 0s 924us/sample - loss: 179.5977 - val_loss: 382.6466
Epoch 975/10000
25/25 [==============================] - 0s 830us/sample - loss: 124.4060 - val_loss: 410.9309
Epoch 976/10000
25/25 [==============================] - 0s 508us/sample - loss: 239.8148 - val_loss: 443.6978
Epoch 977/10000
25/25 [==============================] - 0s 803us/sample - loss: 100.5953 - val_loss: 458.5197
Epoch 978/10000
25/25 [==============================] - 0s 709us/sample - loss: 110.3007 - val_loss: 466.0925
Epoch 979/10000
25/25 [==============================] - 0s 591us/sample - loss: 156.4556 - val_loss: 462.5746
Epoch 980/10000
25/25 [==============================] - 0s 1ms/sample - loss: 173.5518 - val_loss: 452.3103
Epoch 981/10000
25/25 [==============================] - 0s 871us/sample - loss: 217.1037 - val_loss: 433.7945
Epoch 982/10000
25/25 [==============================] - 0s 982us/sample - loss: 201.1111 - val_loss: 408.9202
Epoch 983/10000
25/25 [==============================] - 0s 767us/sample - loss: 271.2451 - val_loss: 394.6056
Epoch 984/10000
25/25 [==============================] - 0s 734us/sample - loss: 150.6757 - val_loss: 379.4501
Epoch 985/10000
25/25 [==============================] - 0s 869us/sample - loss: 110.1169 - val_loss: 373.7717
Epoch 986/10000
25/25 [==============================] - 0s 887us/sample - loss: 182.9739 - val_loss: 384.5250
Epoch 987/10000
25/25 [==============================] - 0s 670us/sample - loss: 214.1135 - val_loss: 384.6789
Epoch 988/10000
25/25 [==============================] - 0s 755us/sample - loss: 139.0950 - val_loss: 388.2985
Epoch 989/10000
25/25 [==============================] - 0s 1ms/sample - loss: 228.9813 - val_loss: 398.3872
Epoch 990/10000
25/25 [==============================] - 0s 823us/sample - loss: 200.7729 - val_loss: 397.4268
Epoch 991/10000
25/25 [==============================] - 0s 727us/sample - loss: 195.7713 - val_loss: 398.7120
Epoch 992/10000
25/25 [==============================] - 0s 636us/sample - loss: 121.7891 - val_loss: 388.1992
Epoch 993/10000
25/25 [==============================] - 0s 912us/sample - loss: 81.3046 - val_loss: 386.1554
Epoch 994/10000
25/25 [==============================] - 0s 976us/sample - loss: 138.7369 - val_loss: 390.6075
Epoch 995/10000
25/25 [==============================] - 0s 805us/sample - loss: 168.7914 - val_loss: 400.5886
Epoch 996/10000
25/25 [==============================] - 0s 1ms/sample - loss: 182.0240 - val_loss: 407.6120
Epoch 997/10000
25/25 [==============================] - 0s 1ms/sample - loss: 197.7520 - val_loss: 423.7771
Epoch 998/10000
25/25 [==============================] - 0s 760us/sample - loss: 162.3470 - val_loss: 440.9093
Epoch 999/10000
25/25 [==============================] - 0s 906us/sample - loss: 126.8197 - val_loss: 441.9878
Epoch 1000/10000
25/25 [==============================] - 0s 932us/sample - loss: 160.2100 - val_loss: 438.5616
Epoch 1001/10000
25/25 [==============================] - 0s 1ms/sample - loss: 141.5212 - val_loss: 436.9389
Epoch 1002/10000
25/25 [==============================] - 0s 852us/sample - loss: 213.6841 - val_loss: 426.0619
Epoch 1003/10000
25/25 [==============================] - 0s 1ms/sample - loss: 137.0784 - val_loss: 407.9806
Epoch 1004/10000
25/25 [==============================] - 0s 1ms/sample - loss: 106.4376 - val_loss: 379.9196
Epoch 1005/10000
25/25 [==============================] - 0s 809us/sample - loss: 159.1677 - val_loss: 353.7372
Epoch 1006/10000
25/25 [==============================] - 0s 823us/sample - loss: 134.0103 - val_loss: 325.6395
Epoch 1007/10000
25/25 [==============================] - 0s 636us/sample - loss: 131.9675 - val_loss: 315.7410
Epoch 1008/10000
25/25 [==============================] - 0s 1ms/sample - loss: 157.1071 - val_loss: 330.1910
Epoch 1009/10000
25/25 [==============================] - 0s 849us/sample - loss: 284.3492 - val_loss: 361.3929
Epoch 1010/10000
25/25 [==============================] - 0s 1ms/sample - loss: 224.0399 - val_loss: 404.8394
Epoch 1011/10000
25/25 [==============================] - 0s 927us/sample - loss: 125.2153 - val_loss: 434.7740
Epoch 1012/10000
25/25 [==============================] - 0s 855us/sample - loss: 184.8831 - val_loss: 466.2383
Epoch 1013/10000
25/25 [==============================] - 0s 690us/sample - loss: 163.8604 - val_loss: 498.9162
Epoch 1014/10000
25/25 [==============================] - 0s 1ms/sample - loss: 115.8211 - val_loss: 506.2538
Epoch 1015/10000
25/25 [==============================] - 0s 730us/sample - loss: 134.9736 - val_loss: 489.2086
Epoch 1016/10000
25/25 [==============================] - 0s 1ms/sample - loss: 138.4671 - val_loss: 472.2636
Epoch 1017/10000
25/25 [==============================] - 0s 894us/sample - loss: 102.2140 - val_loss: 447.4026
Epoch 1018/10000
25/25 [==============================] - 0s 959us/sample - loss: 220.8960 - val_loss: 410.0999
Epoch 1019/10000
25/25 [==============================] - 0s 1ms/sample - loss: 195.4369 - val_loss: 365.5253
Epoch 1020/10000
25/25 [==============================] - 0s 901us/sample - loss: 130.8806 - val_loss: 325.2041
Epoch 1021/10000
25/25 [==============================] - 0s 1ms/sample - loss: 140.9530 - val_loss: 306.2873
Epoch 1022/10000
25/25 [==============================] - 0s 1ms/sample - loss: 175.9087 - val_loss: 286.4619
Epoch 1023/10000
25/25 [==============================] - 0s 686us/sample - loss: 148.1944 - val_loss: 273.6447
Epoch 1024/10000
25/25 [==============================] - 0s 1ms/sample - loss: 350.7609 - val_loss: 306.6174
Epoch 1025/10000
25/25 [==============================] - 0s 1ms/sample - loss: 140.2324 - val_loss: 359.9655
Epoch 1026/10000
25/25 [==============================] - 0s 913us/sample - loss: 192.8459 - val_loss: 408.6946
Epoch 1027/10000
25/25 [==============================] - 0s 1ms/sample - loss: 144.7273 - val_loss: 441.7484
Epoch 1028/10000
25/25 [==============================] - 0s 889us/sample - loss: 119.5891 - val_loss: 457.0098
Epoch 1029/10000
25/25 [==============================] - 0s 1ms/sample - loss: 121.8222 - val_loss: 461.2530
Epoch 1030/10000
25/25 [==============================] - 0s 956us/sample - loss: 137.8363 - val_loss: 457.9579
Epoch 1031/10000
25/25 [==============================] - 0s 976us/sample - loss: 208.3460 - val_loss: 457.5731
Epoch 1032/10000
25/25 [==============================] - 0s 884us/sample - loss: 141.1137 - val_loss: 441.6059
Epoch 1033/10000
25/25 [==============================] - 0s 959us/sample - loss: 178.7845 - val_loss: 418.0074
Epoch 1034/10000
25/25 [==============================] - 0s 824us/sample - loss: 163.4638 - val_loss: 417.6765
Epoch 1035/10000
25/25 [==============================] - 0s 876us/sample - loss: 134.5020 - val_loss: 421.3677
Epoch 1036/10000
25/25 [==============================] - 0s 670us/sample - loss: 135.1541 - val_loss: 412.5544
Epoch 1037/10000
25/25 [==============================] - 0s 985us/sample - loss: 174.9399 - val_loss: 390.3551
Epoch 1038/10000
25/25 [==============================] - 0s 1ms/sample - loss: 153.1417 - val_loss: 393.9521
Out[538]:
<tensorflow.python.keras.callbacks.History at 0x7f994c21c7f0>
In [539]:
np.sqrt(((model.predict(xtrain) - ytrain)**2).sum())
Out[539]:
53.407370327212
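Note that the cell above takes the square root of the *summed* squared error, so the 53.4 figure grows with the number of training samples. A minimal sketch of a per-element RMSE (assuming the same model, xtrain, ytrain, and that the model was compiled with an MSE loss), which would be directly comparable to the per-epoch loss values:

import numpy as np

def rmse(model, x, y):
    # root *mean* squared error per element, unlike sqrt of the summed error above
    err = model.predict(x) - y
    return np.sqrt((err ** 2).mean())

# rmse(model, xtrain, ytrain)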
In [546]:
#np.sqrt(((model.predict(xtest) - ytest)**2).sum())
pd.DataFrame(np.concatenate([model.predict(xtest) , ytest], axis=1))
Out[546]:
   0 (model.predict(xtest))  1 (ytest)
0                 20.126032   28.451027
1                 27.399759   35.860442
2                 57.494453   56.415843
3                 63.270092   65.618219
4                 33.810261   45.538838
5                 30.596041   33.709958
6                 33.810261   45.538838
7                 22.840353   31.480531
In [98]:
from scipy.stats import mode
fig, axes = plt.subplots(1, 1, figsize=(25, 5))
profile = image[3000][100:2500].copy()
plt.plot(profile, label="as-built", linewidth=3, linestyle='--', color='blue')
plt.plot([0, len(profile)], [profile.mean()]*2, label='as-built mean', linewidth=3, linestyle='dotted', color='blue')
# mode of the valley region (heights below the mean), rounded to whole units
mode1 = mode(profile[profile<profile.mean()].round(decimals=0)).mode
# simulate polishing: compress heights above the 60th percentile by 50%
q3 = np.percentile(profile, 60)
cond = profile>q3
profile[cond] = q3 + (profile[cond] - q3)*.5
plt.plot(profile, label='polished', linewidth=3, color='orange')
plt.plot([0, len(profile)], [profile.mean()]*2, label='polished mean', linewidth=3, linestyle='dotted', color='orange')
#mode2 = mode(profile[~cond].round(decimals=0)).mode
# recompute the valley mode after the simulated polish; it is unchanged
mode2 = mode(profile[profile<profile.mean()].round(decimals=0)).mode
print(mode1, mode2)
plt.plot([0, len(profile)], [mode1]*2, label="as-built and polished mode", linewidth=3, linestyle='-.')
plt.legend()
plt.savefig("/home/ben_rasoolov/additive_project/data/paper/figures/mode_vs_mean.png", dpi=300)
[115.] [115.]
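The two modes agree because the simulated polish only rescales heights above the 60th percentile, so the below-mean valley region that determines the rounded mode is untouched while the mean drops. A self-contained synthetic sketch of the same effect (illustrative data, not the notebook's image):

import numpy as np
from scipy.stats import mode

rng = np.random.default_rng(0)
profile = 115 + rng.normal(0, 3, 2000)   # valley plateau near 115
profile[rng.random(2000) < 0.2] += 60    # sparse tall peaks

q = np.percentile(profile, 60)
polished = profile.copy()
polished[polished > q] = q + (polished[polished > q] - q) * 0.5

m0 = mode(profile[profile < profile.mean()].round()).mode
m1 = mode(polished[polished < polished.mean()].round()).mode
# valley mode is (typically) unchanged while the mean shifts down
print(m0, m1, profile.mean(), polished.mean())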
In [277]:
%load_ext autoreload
%autoreload 2
%reset -f
import numpy as np
import joblib
import glob
from imports import *
from additive.feature_functions import feature_functions_functions as feature_funs
from additive.features import Features
from additive.preprocessing import load_and_process_image
from functools import reduce
from operator import or_
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [278]:
data_polished = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/Polished_V15_T1_Left(Bottom)_500X_3D.info")
In [279]:
x_polished = np.array(data_polished['value'].x)[1000:-1000, 1000:-1000].round(decimals=0)
In [280]:
from scipy.stats import mode
In [ ]:
 
In [281]:
modes = mode(x_polished)
mode_polished = modes.mode.mean() - x_polished.mean()
#mode_polished = (np.percentile(x_polished, 90, axis=1)-x_polished.mean(axis=1)).mean()
rv_polished = (x_polished.min(axis=1)-x_polished.mean(axis=1)).mean()
In [282]:
data_asbuilt = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/V15_T1_Left(Bottom)_500X_3D.info")
In [283]:
x_asbuilt = np.array(data_asbuilt['value'].x)[1000:-1000, 1000:-1000].round(decimals=0)
In [284]:
modes = mode(x_asbuilt)
mode_asbuilt = modes.mode.mean() - x_asbuilt.mean()
#mode_asbuilt = (np.percentile(x_asbuilt, 90, axis=1)-x_asbuilt.mean(axis=1)).mean()
In [285]:
rv_asbuilt = (x_asbuilt.min(axis=1)-x_asbuilt.mean(axis=1)).mean()
In [286]:
rku_polished = feature_funs['rku_2d'](x_polished)
rku_asbuilt = feature_funs['rku_2d'](x_asbuilt)
In [287]:
rku_asbuilt, rku_polished
Out[287]:
(3.70802406706433, 2.526385836324556)
In [288]:
mode_asbuilt, mode_polished
Out[288]:
(-0.023651123, 12.448959)
In [289]:
rv_asbuilt, rv_polished
Out[289]:
(-64.23692, -35.417976)
In [290]:
rv_asbuilt-rku_asbuilt*mode_asbuilt, rv_polished-rku_polished*mode_polished
Out[290]:
(-64.14922428430252, -66.86885095969498)
In [291]:
rv_asbuilt/x_asbuilt.std(), rv_polished/x_polished.std()
Out[291]:
(-2.6432981, -2.7193294)
In [292]:
mode1 = mode(x_polished).mode.mean()
mu1 = x_polished.mean()
In [293]:
mode2 = mode(x_asbuilt).mode.mean()
mu2 = x_asbuilt.mean()
In [294]:
x1 = (x_polished.reshape(-1)-x_polished.mean())/x_polished.std()
x2 = (x_asbuilt.reshape(-1)-x_asbuilt.mean())/x_asbuilt.std()
In [295]:
fig, ax = plt.subplots(figsize=(10, 6))
ax.hist(x1, alpha=.5, label='polished', density=False)
ax.hist(x2, alpha=.5, label='as-built', density=False)
ax.plot([mode1-mu1, mode1-mu1], [0, .025], label='polished mode')
ax.plot([0, 0], [0, .025], label='polished mean')
ax.plot([mode2-mu2, mode2-mu2], [0, .025], label='as-built mode')
ax.plot([0, 0], [0, .025], label='as-built mean')
ax.set_xlabel('Height')
ax.set_ylabel('Frequency')
ax.legend()
Out[295]:
<matplotlib.legend.Legend at 0x7f99a6f47cf8>
In [299]:
def get_stats(x):
    functions = [np.mean, np.median, lambda x: mode(x).mode.mean()]
    return compute(delayed(f)(x) for f in functions)
In [300]:
get_stats(x_asbuilt)
Out[300]:
([153.60017, 154.0, 153.57652],)
In [301]:
get_stats(x_polished)
Out[301]:
([143.70636, 146.0, 156.15532],)
In [41]:
500*20
Out[41]:
10000

Before and after polished comparison

In [1]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
# df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v02.csv")
In [2]:
box_files = {
    "morePolished_V07_T1_Left(Bottom)_500X_3D.csv": 'https://auburn.box.com/shared/static/fmdgsitx225kiz856uzmmdxlo30pepyi.csv',
    "morePolished_V07_T1_Right(Top)_500X_3D.csv": "https://auburn.box.com/shared/static/xgoav7ttot5kgsgoul1ee2p5cy5vuct9.csv",
    "morePolished_V07_T2_Right(Top)_500X_3D.csv": "https://auburn.box.com/shared/static/7t3g815aioj4zwkzl33jx1tsivvd8cm6.csv",
    "morePolished_V07_T2_Left(Bottom)_500X_3D.csv": "https://auburn.box.com/shared/static/cy4ny0h4kzs1322htsfh7y35h78rd7uu.csv",
}
In [4]:
from additive.utility import download_from_dict
In [5]:
# download_from_dict(box_files, "/home/ben_rasoolov/additive_project/data/original_images/")
In [7]:
files = glob.glob("/data/additive_project/data/original_images/morePolished_*csv")
files
Out[7]:
[]
In [14]:
from additive.utility import extract_array_from_csv
In [15]:
with Pool(4) as p:
    out = p.map(extract_array_from_csv, files)
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T1_Right(Top)_500X_3D.csv
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T2_Right(Top)_500X_3D.csv
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T1_Left(Bottom)_500X_3D.csv
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T2_Left(Bottom)_500X_3D.csv
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T1_Right(Top)_500X_3D.pd
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T1_Left(Bottom)_500X_3D.pd
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T2_Left(Bottom)_500X_3D.pd
/home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T2_Right(Top)_500X_3D.pd
file /home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T2_Left(Bottom)_500X_3D.pd exists
file /home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T2_Right(Top)_500X_3D.pd exists
file /home/ben_rasoolov/additive_project/data/original_images/morePolished_V07_T1_Right(Top)_500X_3D.pd exists
In [22]:
from additive.utility import image_rescale
images_d = image_rescale(files)
with Pool(4) as p:
    with dask.config.set(pool=p):
        images = images_d.compute()
In [6]:
files = glob.glob("/home/ben_rasoolov/additive_project/data/original_images/morePolished_*pd")
files
Out[6]:
[]
In [4]:
from additive.utility import dfe
from collections import namedtuple
ImageInfo = namedtuple("ImageInfo", "x")
root = "/home/ben_rasoolov/additive_project/data/experiment_04/"
for file, image in zip(files, images):
    d, f, e = dfe(file)
    new_path = root + f + ".info"
    joblib.dump({'value': ImageInfo(image)}, new_path)
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-4-eb339ae1373d> in <module>
      3 ImageInfo = namedtuple("ImageInfo", "x")
      4 root = "/home/ben_rasoolov/additive_project/data/experiment_04/"
----> 5 for file, image in zip(files, images):
      6     d, f, e = dfe(file)
      7     new_path = root + f + ".info"

NameError: name 'images' is not defined
In [1]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
from additive.features import ImageInfo, Features
# df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v02.csv")
In [2]:
files = glob.glob("../data/experiment_04/*info")
files
Out[2]:
['../data/experiment_04/Polished_V07_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_04/V07_T2_Left(Bottom)_500X_3D.info',
 '../data/experiment_04/Polished_V07_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_04/morePolished_V07_T2_Right(Top)_500X_3D.info',
 '../data/experiment_04/morePolished_V07_T1_Right(Top)_500X_3D.info',
 '../data/experiment_04/V07_T2_Right(Top)_500X_3D.info',
 '../data/experiment_04/morePolished_V07_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_04/Polished_V07_T1_Right(Top)_500X_3D.info',
 '../data/experiment_04/Polished_V07_T2_Right(Top)_500X_3D.info',
 '../data/experiment_04/V07_T1_Left(Bottom)_500X_3D.info',
 '../data/experiment_04/V07_T1_Right(Top)_500X_3D.info',
 '../data/experiment_04/morePolished_V07_T2_Left(Bottom)_500X_3D.info']
In [3]:
ImageInfo = namedtuple("ImageInfo", "x")
def load_images(x):
    return joblib.load(x)['value'].x
with Pool(6) as p:
    images = p.map(load_images, files)
In [8]:
from scipy.stats import mode
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x):
    funs = {k: v for k, v in feat_funs.items()}
    return compute({k: delayed(v)(x) for k, v in funs.items()})[0]
In [10]:
res = [image_stats(x[500:-500, 500:-500]) for x in images]
/home/bzr0014/.local/lib/python3.6/site-packages/numpy/core/fromnumeric.py:746: UserWarning: Warning: 'partition' will ignore the 'mask' of the MaskedArray.
  a.partition(kth, axis=axis, kind=kind, order=order)
In [11]:
pd.DataFrame(res).assign(file=[x.split("/")[-1] for x in files])
Out[11]:
mean median mode_1d mode_2d ra_1d ra_2d rku_1d rku_2d rp_1d rp_2d rq_1d rq_2d rsk_1d rsk_2d rv_1d rv_2d file
0 156.758153 157.102081 5.561661 1.0 11.502046 11.717911 2.451453 2.461532 25.957439 32.933853 13.820404 14.205364 -0.268579 -0.167344 38.065586 60.757141 Polished_V07_T2_Left(Bottom)_500X_3D.info
1 177.753221 176.877884 -1.046246 0.0 16.938068 17.407897 2.811959 2.843771 62.528289 102.751663 20.925889 21.528677 0.243769 0.282603 48.555349 73.362480 V07_T2_Left(Bottom)_500X_3D.info
2 139.529167 139.704361 12.641944 -3.0 10.867956 12.931453 2.436490 2.384222 20.214056 33.796738 12.797262 15.570337 -0.350016 -0.156459 33.894557 56.214226 Polished_V07_T1_Left(Bottom)_500X_3D.info
3 60.857285 63.192513 9.783106 10.0 9.128609 9.417842 2.881590 2.804885 16.401426 53.317192 10.920797 11.324412 -0.774356 -0.674009 33.891792 55.907990 morePolished_V07_T2_Right(Top)_500X_3D.info
4 105.283836 106.990700 8.583746 9.0 9.028659 9.860147 2.889716 2.829913 16.849340 45.957909 10.913875 11.982503 -0.736547 -0.601301 33.398396 51.212940 morePolished_V07_T1_Right(Top)_500X_3D.info
5 175.856914 175.445374 -0.104093 2.0 16.561858 16.928977 2.727582 2.825149 58.995701 92.713425 20.378586 20.894308 0.202108 0.218359 47.285596 71.145241 V07_T2_Right(Top)_500X_3D.info
6 107.514587 109.312920 8.716775 7.0 8.077580 8.864080 2.963711 2.888977 13.474958 29.617126 9.689466 10.841835 -0.845382 -0.648006 30.247091 47.639889 morePolished_V07_T1_Left(Bottom)_500X_3D.info
7 209.922447 210.362076 9.028795 -4.0 11.447176 13.238714 2.333169 2.438858 22.785887 33.664825 13.610626 15.987694 -0.300627 -0.218173 36.185216 61.481110 Polished_V07_T1_Right(Top)_500X_3D.info
8 142.472852 142.998444 15.344728 17.0 11.615823 11.816400 2.334556 2.267206 20.377383 27.051926 13.624758 14.048216 -0.420209 -0.272663 36.811993 55.686913 Polished_V07_T2_Right(Top)_500X_3D.info
9 188.420467 186.360641 -8.037486 -8.0 15.730038 16.745289 2.764421 2.937672 58.986943 90.020844 19.215461 20.707703 0.407426 0.434259 40.525698 63.158852 V07_T1_Left(Bottom)_500X_3D.info
10 195.918021 195.041656 -2.380284 -2.0 16.833730 17.447291 2.685036 2.933086 60.320321 109.982101 20.554394 21.521875 0.248873 0.274079 45.604422 86.227158 V07_T1_Right(Top)_500X_3D.info
11 72.049713 73.947533 5.619812 6.0 9.358821 9.677210 2.923517 2.896443 19.524122 30.159767 11.544605 11.934641 -0.640460 -0.568536 35.442917 56.031059 morePolished_V07_T2_Left(Bottom)_500X_3D.info
In [12]:
def min_max_scale(x, a=0, b=1):
    mn, mx = x.min(), x.max()
    rng = b - a
    out = (x - mn)/(mx-mn)
    return out * rng + a

def get_equalized_stats(image):
    scaled_image = min_max_scale(image, 0, 255).astype('uint8')
    equalized_image = cv2.equalizeHist(scaled_image)
    return image_stats(equalized_image)
In [ ]:
equalized_res = compute([delayed(get_equalized_stats)(x[500:-500, 500:-500]) for x in images])[0]
In [144]:
pd.DataFrame(equalized_res).assign(file=[x.split("/")[-1] for x in files])[['rp_1d', 'rv_1d', 'mode_1d', 'file']]\
    .sort_values(['mode_1d'])
Out[144]:
rp_1d rv_1d mode_1d file
2 123.583894 123.813139 -23.175467 V07_T1_Left(Bottom)_500X_3D.info
5 123.965969 125.022068 -3.861222 V07_T1_Right(Top)_500X_3D.info
0 124.557950 127.246110 2.712053 V07_T2_Left(Bottom)_500X_3D.info
11 124.705182 126.749379 6.081373 V07_T2_Right(Top)_500X_3D.info
9 100.090600 122.329493 39.179011 Polished_V07_T1_Right(Top)_500X_3D.info
1 117.278745 126.911691 40.640307 morePolished_V07_T2_Left(Bottom)_500X_3D.info
3 116.831014 127.172021 43.837700 Polished_V07_T2_Left(Bottom)_500X_3D.info
8 91.589925 120.132741 51.214514 Polished_V07_T1_Left(Bottom)_500X_3D.info
6 107.467249 125.723292 54.356838 morePolished_V07_T1_Right(Top)_500X_3D.info
7 98.152255 125.258575 61.332935 morePolished_V07_T1_Left(Bottom)_500X_3D.info
10 114.919440 127.394724 72.920990 morePolished_V07_T2_Right(Top)_500X_3D.info
4 109.085200 126.866078 81.829393 Polished_V07_T2_Right(Top)_500X_3D.info
In [4]:
dd = dict(zip([x.split("/")[-1] for x in files], images))
In [11]:
im1 = dd['V07_T1_Left(Bottom)_500X_3D.info']
im2 = dd['Polished_V07_T1_Left(Bottom)_500X_3D.info']
im3 = dd['morePolished_V07_T1_Left(Bottom)_500X_3D.info']
ims = [im1, im2, im3]
names = ['asbuilt', 'polished', 'more polished']
dxs = [-450, -430, -600]
dys = [650, 1620, 720]
In [29]:
fig, axes = plt.subplots(1, 3, figsize=(18, 6))
sub_ims = []
for name, im, ax, dx, dy in zip(names, ims, axes, dxs, dys):
    print(im.shape)
    sub_im = im[1000+dx:2000+dx, 1000+dy:2000+dy]
    ax.imshow(sub_im, cmap='jet')
    sub_ims.append(sub_im)
    ax.set_title(name)
plt.savefig("../data/paper/figures/polishedVsHalfVsMorePolished3D.png", dpi=300)
(7269, 7725)
(7256, 7717)
(6854, 7711)
In [59]:
import matplotlib as mpl
mpl.rcParams.update(mpl.rcParamsDefault)
font = {'size'   : 13}
matplotlib.rc('font', **font)
fig, axes = plt.subplots(1, 1, figsize=(20, 4))
N = 1000
for name, sub_im, dx, dy in zip(names, sub_ims, [-20, 10, 30], [0, 45, 60]):
    x = np.arange(sub_im.shape[1])
    plt.plot(x+dx, sub_im[500]-sub_im.min(), label=name, linewidth=3, alpha=.8)
plt.legend(loc='upper left')
plt.savefig("../data/paper/figures/polishedVsHalfVsMorePolished.png", dpi=300)
In [275]:
from additive.experimental import get_best_template_match, get_top_left_best_template_match, equalize_hist
In [205]:
template = ims[0][1000:-1000, 1000:-1000]
out1 = get_best_template_match(ims[1], template, methods[1])
out2 = get_best_template_match(ims[2], template, methods[1])
In [208]:
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
s = slice(1500, 2000)
axes[0].imshow(template[s,s])
axes[1].imshow(out1[s, s])
axes[2].imshow(out2[s, s])
Out[208]:
<matplotlib.image.AxesImage at 0x7f16f1cfd630>
In [364]:
w, h = 5500, 5500
l0 = 1000, 1000
template = ims[0][l0[0]:l0[0]+w, l0[1]:l0[1]+h]
l1 = get_top_left_best_template_match(ims[1], template, methods[3])
l2 = get_top_left_best_template_match(ims[2], template, methods[3])
In [365]:
l0, l1, l2
Out[365]:
((1000, 1000), (1023, 1959), (848, 1043))
In [363]:
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
profiles = []
for ax, im, (x, y), label in zip(axes, ims, [l0, l1, l2], ['asbuilt', 'polished', 'more polished']):
    ax.imshow(im[x:x+500, y:y+500])
In [301]:
plt.subplots(figsize=(15, 5))
profiles = []
for im, (x, y), label in zip(ims, [l0, l1, l2], ['asbuilt', 'polished', 'more polished']):
    #plt.imshow(im[x:x+500, y:y+500])
    # profile = equalize_hist(im[x:x+500, y:y+2000])[200]
    profile = im[x:x+500, y:y+2000][200].round(decimals=0)
    plt.plot(profile-profile.min(), label=label)
    profiles.append(profile)
plt.legend()
Out[301]:
<matplotlib.legend.Legend at 0x7f16f18f07b8>
In [302]:
from additive.experimental import extract_circles
In [303]:
profile = profiles[0]
circles = []
for profile in profiles:
    circles.append(extract_circles(np.arange(len(profile)), profile, 100))
1976 1976
1976 1976
1976 1976
In [314]:
from scipy.stats import mode
[mode(profile).mode for profile in profiles]
Out[314]:
[array([157.], dtype=float32),
 array([141.], dtype=float32),
 array([109.], dtype=float32)]
In [313]:
[profile.mean()-profile.min() for profile in profiles]
Out[313]:
[32.32300000000001, 21.08, 19.325996]
In [309]:
pd.concat([pd.DataFrame(circle[-1]).mean() for circle in circles], axis=1).round(decimals=2)
Out[309]:
0 1 2
beg 845.78 1119.67 983.00
end 867.67 1143.33 1010.00
index 856.22 1131.00 996.00
h 162.00 113.89 87.88
cx 0.02 0.01 -0.00
cy 0.00 -0.00 0.00
r 0.74 0.78 0.76
x_mean 868.22 1143.00 1008.00
x_std 6.31 6.82 7.79
y_mean 156.08 111.50 87.61
y_std 3.41 2.20 1.89
In [322]:
from additive.experimental import get_cut_points
[len(profile)/len(get_cut_points(profile, mode(profile).mode[0])) for profile in profiles]
Out[322]:
[333.3333333333333, 250.0, 285.7142857142857]
In [331]:
[len(profile)/len(get_cut_points(profile, np.percentile(profile, 30))) for profile in profiles]
Out[331]:
[166.66666666666666, 200.0, 250.0]
In [332]:
from additive.experimental import get_ratio_under_thresh
In [347]:
[get_ratio_under_thresh(profile, np.percentile(profile, 15)) for profile in profiles]
Out[347]:
[12.0, 9.714285714285714, 13.0]
In [355]:
tmp = [[(im<np.min(im)+t).sum()/(im.shape[0]*im.shape[1]) for im in ims]  for t in range(10, 200, 10)]
In [357]:
plt.plot(tmp)
Out[357]:
[<matplotlib.lines.Line2D at 0x7f16f1735dd8>,
 <matplotlib.lines.Line2D at 0x7f16f1735f60>,
 <matplotlib.lines.Line2D at 0x7f16f16f9518>]

Formalized template matching

In [366]:
%load_ext autoreload
%autoreload 2
%reset -f
from imports import *
# df = pd.read_csv("/home/ben_rasoolov/additive_project/data/paper/data/global_stats_equalize_hist_v02.csv")
The autoreload extension is already loaded. To reload it, use:
  %reload_ext autoreload
In [429]:
methods = [cv2.TM_CCOEFF, cv2.TM_CCOEFF_NORMED, cv2.TM_CCORR,
            cv2.TM_CCORR_NORMED, cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED]
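The project's get_best_template_match and get_top_left_best_template_match live in additive.experimental; as a hedged sketch of what such a helper can look like on top of OpenCV's matchTemplate API (the function name and details below are illustrative, not the project's implementation):

import cv2
import numpy as np

def top_left_match(image, template, method=cv2.TM_CCOEFF_NORMED):
    # matchTemplate expects 8-bit or 32-bit float input
    res = cv2.matchTemplate(image.astype(np.float32),
                            template.astype(np.float32), method)
    min_val, max_val, min_loc, max_loc = cv2.minMaxLoc(res)
    # SQDIFF variants are minimized; the other methods are maximized
    loc = min_loc if method in (cv2.TM_SQDIFF, cv2.TM_SQDIFF_NORMED) else max_loc
    return loc[1], loc[0]  # (row, col) of the best-matching top-left corner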
In [476]:
polished_files = glob.glob("/home/ben_rasoolov/additive_project/data/experiment_03/Polished*")
unpolished_files = [x.replace('Polished_', '') for x in polished_files]
In [477]:
assert len([x for x in unpolished_files if not os.path.exists(x)]) == 0
assert len(polished_files) == len(unpolished_files)
In [478]:
len(polished_files)
Out[478]:
36
In [671]:
from additive.experimental import match_asbuilt_unpolished, get_image_from_top_left, image_correlation

An example

In [599]:
index = 30
as_built = np.array(joblib.load(unpolished_files[index])['value'].x)
polished = np.array(joblib.load(polished_files[index])['value'].x)
In [600]:
print(unpolished_files[index])
/home/ben_rasoolov/additive_project/data/experiment_03/V07_T1_Left(Bottom)_500X_3D.info
In [588]:
r1, r2 = match_asbuilt_unpolished(as_built, polished, .7)
In [589]:
plt.imshow(get_image_from_top_left(as_built, r1, (1000, 1000))[:3000, :3000])
plt.show()
plt.imshow(get_image_from_top_left(polished, r2, (1000, 1000))[:3000, :3000])
Out[589]:
<matplotlib.image.AxesImage at 0x7f16f12454a8>
In [482]:
def load_image(x):
    return np.array(joblib.load(x)['value'].x)

polished_files_b = bag.from_sequence(polished_files).map(load_image)
asbuilt_files_b = bag.from_sequence(unpolished_files).map(load_image)
In [521]:
top_left_matches_b = bag.map(match_asbuilt_unpolished, asbuilt_files_b, polished_files_b, ratio=.5)
In [522]:
with Pool(10) as p:
    with dask.config.set(pool=p):
        top_left_matches_5 = top_left_matches_b.compute()
In [610]:
top_left_matches = []
# consensus across the three matching runs (the _3/_5/_7 suffixes suggest
# overlap ratios .3, .5, .7): accept an offset when two runs agree within 50 px
for (file, x, y, z) in zip(unpolished_files, top_left_matches_3, top_left_matches_5, top_left_matches_7):
    file = file.split('/')[-1]
    error1 = np.sqrt(((np.array(x)-np.array(y))**2).sum())
    error2 = np.sqrt(((np.array(x)-np.array(z))**2).sum())
    error3 = np.sqrt(((np.array(y)-np.array(z))**2).sum())
    if error1 < 50:
        res = x
        error = error1
    elif error2 < 50:
        res = x
        error = error2
    elif error3 < 50:
        res = y
        error = error3
    else:
        res = None  # no two runs agree: mark the pair as unmatched
    print(res)
    top_left_matches.append(res)
    # if error > 40:
    #     print(file, error)
(array([ 67, 192]), array([0, 0]))
(array([0, 0]), array([ 28, 528]))
(array([73, 92]), array([0, 0]))
(array([14,  0]), array([  0, 524]))
(array([231,   0]), array([  0, 322]))
(array([257,  88]), array([0, 0]))
(array([35,  0]), array([   0, 1379]))
(array([15,  0]), array([ 0, 27]))
(array([0, 0]), array([106, 303]))
(array([100,  95]), array([0, 0]))
(array([318, 206]), array([0, 0]))
(array([0, 0]), array([ 30, 819]))
(array([232,   0]), array([  0, 430]))
(array([264, 204]), array([0, 0]))
(array([3, 0]), array([   0, 1930]))
(array([0, 0]), array([ 39, 871]))
(array([59,  0]), array([   0, 1273]))
(array([0, 0]), array([ 51, 869]))
(array([305,   0]), array([  0, 199]))
(array([ 0, 54]), array([16,  0]))
(array([ 87, 293]), array([0, 0]))
(array([ 39, 390]), array([0, 0]))
(array([320,   0]), array([  0, 522]))
(array([27,  0]), array([  0, 719]))
(array([268,  62]), array([0, 0]))
(array([301, 202]), array([0, 0]))
(array([0, 0]), array([0, 0]))
(array([371,   0]), array([  0, 211]))
(array([0, 0]), array([0, 0]))
(array([304,  65]), array([0, 0]))
(array([0, 0]), array([ 22, 959]))
None
None
(array([0, 0]), array([ 24, 646]))
(array([0, 0]), array([186, 318]))
(array([ 58, 292]), array([0, 0]))
In [611]:
joblib.dump((unpolished_files, unpolished_files, top_left_matches_3, top_left_matches_5, top_left_matches_7), 
            "/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.tuple")
Out[611]:
['/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.tuple']
In [699]:
joblib.dump((list(unpolished_files), list(polished_files), top_left_matches, correlations), 
            "/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list")
Out[699]:
['/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list']

Good matches:

  • /home/ben_rasoolov/additive_project/data/experiment_03/V11_T2_Right(Top)_500X_3D.info 871.8726971295753
In [697]:
import random
polished_vs_asbuilt_top_left_matches = joblib.load(
    "/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list")
# polished_vs_asbuilt_top_left_matches = zip(*polished_vs_asbuilt_top_left_matches)
In [721]:
dxy = (500, 500)
correlations = []
for as_file, po_file, val, *_ in zip(*polished_vs_asbuilt_top_left_matches):
    print(as_file)
    if val is None:
        correlations.append(-1)
        continue
    as_tl, po_tl = val
    asbuilt = load_image(as_file)
    polished = load_image(po_file)
    x = get_image_from_top_left(asbuilt, as_tl, dxy)[:2000, 2000]
    y = get_image_from_top_left(polished, po_tl, dxy)[:2000, 2000]
    plt.plot(x)
    plt.plot(y)
    break
    # correlation = image_correlation(x, y)
    # correlations.append(correlation)
    # print(correlation)
/home/ben_rasoolov/additive_project/data/experiment_03/V03_T2_Left(Bottom)_500X_3D.info
In [708]:
tmp = joblib.load("/home/ben_rasoolov/additive_project/data/experiment_03/polished_vs_asbuilt_top_left_matches.list")
res = pd.DataFrame(sorted(zip(*tmp), key=lambda x: x[-1]), columns=['asbuilt', 'polished', 'top_left_match', 'correlation'])
In [718]:
res[(res['correlation']<1) & (res['correlation']>.21)]
Out[718]:
asbuilt polished top_left_match correlation
13 V05_T1_Left(Bottom)_500X_3D.info Polished_V05_T1_Left(Bottom)_500X_3D.info ([15, 0], [0, 27]) 0.240367
14 V01_T1_Left(Bottom)_500X_3D.info Polished_V01_T1_Left(Bottom)_500X_3D.info ([320, 0], [0, 522]) 0.244180
15 V03_T2_Left(Bottom)_500X_3D.info Polished_V03_T2_Left(Bottom)_500X_3D.info ([67, 192], [0, 0]) 0.268046
16 V11_T2_Left(Bottom)_500X_3D.info Polished_V11_T2_Left(Bottom)_500X_3D.info ([0, 0], [51, 869]) 0.276233
17 V01_T1_Right(Top)_500X_3D.info Polished_V01_T1_Right(Top)_500X_3D.info ([232, 0], [0, 430]) 0.280534
18 V09_T1_Right(Top)_500X_3D.info Polished_V09_T1_Right(Top)_500X_3D.info ([0, 0], [30, 819]) 0.286476
19 V09_T1_Left(Bottom)_500X_3D.info Polished_V09_T1_Left(Bottom)_500X_3D.info ([27, 0], [0, 719]) 0.292332
20 V09_T2_Left(Bottom)_500X_3D.info Polished_V09_T2_Left(Bottom)_500X_3D.info ([0, 0], [106, 303]) 0.303910
21 V07_T2_Left(Bottom)_500X_3D.info Polished_V07_T2_Left(Bottom)_500X_3D.info ([35, 0], [0, 1379]) 0.315987
22 V11_T1_Right(Top)_500X_3D.info Polished_V11_T1_Right(Top)_500X_3D.info ([14, 0], [0, 524]) 0.327908
23 V17_T2_Right(Top)_500X_3D.info Polished_V17_T2_Right(Top)_500X_3D.info ([304, 65], [0, 0]) 0.330157
24 V15_T2_Left(Bottom)_500X_3D.info Polished_V15_T2_Left(Bottom)_500X_3D.info ([318, 206], [0, 0]) 0.350932
25 V07_T2_Right(Top)_500X_3D.info Polished_V07_T2_Right(Top)_500X_3D.info ([59, 0], [0, 1273]) 0.352210
26 V11_T1_Left(Bottom)_500X_3D.info Polished_V11_T1_Left(Bottom)_500X_3D.info ([0, 0], [28, 528]) 0.356179
27 V17_T1_Right(Top)_500X_3D.info Polished_V17_T1_Right(Top)_500X_3D.info ([301, 202], [0, 0]) 0.378422
28 V17_T1_Left(Bottom)_500X_3D.info Polished_V17_T1_Left(Bottom)_500X_3D.info ([264, 204], [0, 0]) 0.406239
29 V17_T2_Left(Bottom)_500X_3D.info Polished_V17_T2_Left(Bottom)_500X_3D.info ([268, 62], [0, 0]) 0.456393
30 V07_T1_Right(Top)_500X_3D.info Polished_V07_T1_Right(Top)_500X_3D.info ([0, 0], [24, 646]) 0.493858
31 V01_T2_Right(Top)_500X_3D.info Polished_V01_T2_Right(Top)_500X_3D.info ([257, 88], [0, 0]) 0.515026
32 V07_T1_Left(Bottom)_500X_3D.info Polished_V07_T1_Left(Bottom)_500X_3D.info ([0, 0], [22, 959]) 0.529312
33 V01_T2_Left(Bottom)_500X_3D.info Polished_V01_T2_Left(Bottom)_500X_3D.info ([231, 0], [0, 322]) 0.540499
In [650]:
dxy = np.random.randint(4000, size=2)
plt.imshow(get_image_from_top_left(as_built, as_tl, dxy)[:1000, :1000])
plt.show()
plt.imshow(get_image_from_top_left(polished, po_tl, dxy)[:1000, :1000])
Out[650]:
<matplotlib.image.AxesImage at 0x7f17086ac9b0>
In [653]:
match_asbuilt_unpolished(asbuilt, polished, .7)
998 1158
Out[653]:
(array([371,   0]), array([  0, 204]))
In [656]:
match_asbuilt_unpolished(polished, asbuilt, .8)
665 772
Out[656]:
(array([  0, 205]), array([374,   0]))
In [52]:
data = joblib.load("../data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info")
image = np.array(data['value'].x)
In [64]:
data_ab = joblib.load("../data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info")
image_ab = np.array(data_ab['value'].x)
In [68]:
plt.subplots(figsize=(20, 20))
plt.imshow(image_ab[500:-500], cmap='jet')
Out[68]:
<matplotlib.image.AxesImage at 0x7f0158fa1f60>
In [69]:
plt.subplots(figsize=(20, 20))
plt.imshow(image[500:-500], cmap='jet')
Out[69]:
<matplotlib.image.AxesImage at 0x7f01594025f8>
In [70]:
df = pd.read_csv("../data/paper/data/global_stats_equalize_hist_v02.csv")
In [77]:
df[df['specimen']=='V01'].groupby(['ispolished']).mean()
Out[77]:
Unnamed: 0 Unnamed: 0.1 ra_1d rq_1d rsk_1d rku_1d rp_1d rv_1d ra_2d rq_2d rp_2d rv_2d rsk_2d rku_2d mode_1d mode_2d median mean
ispolished
False 62.50 62.50 62.231259 72.038829 -0.025634 1.871606 123.833973 127.343620 63.682195 73.541757 126.209557 128.790443 -0.014940 1.801398 -1.056433 -23.75 129.5 128.790443
True 46.75 46.75 56.806961 64.787442 -0.405448 1.958130 91.148023 125.852803 63.889889 73.781657 126.507071 128.492929 -0.006824 1.800958 67.679014 97.50 129.0 128.492929
In [98]:
def get_v01(path):
    df2 = pd.read_csv(path)
    return df2[df2['specimen']=='V01']#.groupby(['ispolished']).mean()
In [101]:
files = glob.glob("../data/experiment_03/V01*")
In [114]:
from additive.feature_functions import *
# out_b = bag.from_sequence(files).map(lambda x: np.array(joblib.load(x)['value'].x)[500:-500])
In [108]:
with Pool(5) as pool:
    with dask.config.set(pool=pool):
        out = out_b.compute()
In [113]:
list(map(mode_2d, out))
Out[113]:
[-10.0, -1.0, -3.0, -3.0]
In [115]:
list(map(mode_1d, out))
Out[115]:
[-6.087558, 4.278402, -6.42732, 3.9152434]
In [117]:
pd.read_csv("../data/paper/global_stats_tilted_rotated_cropped_v02.csv")
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
<ipython-input-117-9dffc192f4b3> in <module>
----> 1 pd.read_csv("../data/paper/global_stats_tilted_rotated_cropped_v02.csv")

[... pandas internal call frames truncated ...]

FileNotFoundError: [Errno 2] File b'../data/paper/global_stats_tilted_rotated_cropped_v02.csv' does not exist: b'../data/paper/global_stats_tilted_rotated_cropped_v02.csv'
In [1]:
%load_ext autoreload
%autoreload 2
In [2]:
from imports import *
In [391]:
# p = joblib.load('../data/experiment_03/Polished_V03_T1_Right(Top)_500X_3D.info')
p = joblib.load('../data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info')
In [392]:
# a = joblib.load("../data/experiment_03/V03_T1_Right(Top)_500X_3D.info")
a = joblib.load("../data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info")
In [393]:
pol = np.array(p['value'].x)[1000:-1000]
In [394]:
try: asb = a[1000:-1000]
except: asb = np.array(a['value'].x[1000:-1000])
In [395]:
from additive.experimental import match_asbuilt_unpolished, get_image_from_top_left
In [396]:
r1, r2 = match_asbuilt_unpolished(asb, pol, .4)
In [397]:
r1, r2, asb.shape, pol.shape
Out[397]:
(array([268,  59]), array([0, 0]), (5251, 7711), (4655, 7720))
In [398]:
index = 0#np.random.randint(0, 1000)
size = 3000
asub = get_image_from_top_left(asb, r1, (1000, 1000))[index:index+size, index:index+size]
psub = get_image_from_top_left(pol, r2, (1000, 1000))[index:index+size, index:index+size]
In [399]:
from scipy.stats import pearsonr
pearsonr(asub.reshape(-1), psub.reshape(-1))
Out[399]:
(0.4662662273241144, 0.0)
In [400]:
fig, axes = plt.subplots(1, 2, figsize=(10, 5))
for ax, img in zip(axes, [asub, psub]):
    ax.imshow(img)
    ax.grid(False)
In [401]:
plt.subplots(figsize=(10, 5))
ahist = plt.hist(asub.reshape(-1)-asub.mean(), bins=55, alpha=.9, label="As-built Histogram", 
         density=True, linewidth=1, edgecolor=None, color="#F08080")
phist = plt.hist(psub.reshape(-1)-psub.mean(), bins=55, alpha=.7, label="Polished Histogram", 
         density=True, edgecolor=None, color='#008B8B')
amode = get_mode(ahist)
pmode = get_mode(phist)
height = phist[0].max()
plt.vlines(pmode, 0, height, linestyle='--', label="Polished Mode")
plt.vlines(amode, 0, height, linestyle='dotted', label="As-built Mode")
plt.xlabel(r"Adjusted height ($v_i - \mu_i$)")  # raw string so \mu reaches mathtext intact
plt.ylabel("Relative Frequency")
plt.legend()
plt.savefig("../data/paper/figures/v17_t2_left_histogram_comparison.png", dpi=300)
In [356]:
def get_mode(hist_res):
    # plt.hist returns (counts, bin_edges, patches); take the left edge of the tallest bin
    a, b, *_ = hist_res
    return b[np.argmax(a)]
In [357]:
get_mode(phist)
Out[357]:
6.1397524
In [351]:
ahist
Out[351]:
(array([1.03735185e-05, 1.05752468e-05, 1.09786404e-05, 2.84119709e-05,
        2.02283611e-05, 3.93329244e-05, 7.39979119e-05, 1.40157761e-04,
        3.95491176e-04, 7.18856019e-04, 8.74690185e-04, 1.42494826e-03,
        1.78897090e-03, 2.35977609e-03, 3.11404690e-03, 3.94277659e-03,
        4.95333118e-03, 6.40340326e-03, 8.27631415e-03, 1.00573091e-02,
        1.18791601e-02, 1.40114031e-02, 1.55447041e-02, 1.64295620e-02,
        1.68020292e-02, 1.62588609e-02, 1.53999905e-02, 1.61184447e-02,
        1.52742105e-02, 1.45423006e-02, 1.24047522e-02, 1.08700985e-02,
        9.43143949e-03, 7.64070530e-03, 5.57931601e-03, 4.08498038e-03,
        3.11508425e-03, 2.30709934e-03, 2.07165133e-03, 1.40296214e-03,
        9.35980451e-04, 6.80820458e-04, 4.60353700e-04, 3.11696033e-04,
        2.16633645e-04, 1.53182290e-04, 1.18027822e-04, 1.02207973e-04,
        1.03879467e-04, 9.73669975e-05, 1.00334976e-04, 6.89552193e-05,
        4.37128545e-05, 4.92166797e-05, 8.30745942e-05]),
 array([-99.23427  , -95.378296 , -91.52233  , -87.66636  , -83.810394 ,
        -79.95442  , -76.09845  , -72.242485 , -68.38651  , -64.53055  ,
        -60.674576 , -56.818607 , -52.96264  , -49.106667 , -45.250698 ,
        -41.39473  , -37.53876  , -33.682793 , -29.826822 , -25.970852 ,
        -22.114883 , -18.258915 , -14.402945 , -10.546976 ,  -6.6910067,
         -2.8350372,   1.020932 ,   4.876901 ,   8.73287  ,  12.58884  ,
         16.444809 ,  20.300777 ,  24.156748 ,  28.012716 ,  31.868687 ,
         35.724655 ,  39.580624 ,  43.436592 ,  47.292564 ,  51.148533 ,
         55.0045   ,  58.86047  ,  62.71644  ,  66.57241  ,  70.428375 ,
         74.28435  ,  78.14032  ,  81.996284 ,  85.85226  ,  89.70822  ,
         93.56419  ,  97.420166 , 101.27613  , 105.1321   , 108.98807  ,
        112.84404  ], dtype=float32),
 <a list of 55 Patch objects>)
In [408]:
df = pd.read_csv("../data/paper/data/global_stats_no_transform_v02.csv").drop('Unnamed: 0', axis=1)
In [437]:
df.groupby(['ispolished', 'specimen']).mean().to_csv("../data/paper/data/table_02_v01.csv")  # .filter(regex=".*2d")
In [590]:
from scipy.stats import norm
x = norm.rvs(100, 100, size=(50, 7000))
In [591]:
freq, val = np.histogram(x, bins=100, density=True)
val[np.argmax(freq)]
Out[591]:
80.34676840911493
In [592]:
rng = ((x.max() - x.min()) / 50).round(decimals=2)  # bin width implied by 50 equal-width bins
In [593]:
from additive.feature_functions import mode_1d, mode_2d, mode
mode(x.round(2) // rng).mode.mean() * rng
Out[593]:
78.08422857142858
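The cell above quantizes the values into bins of width rng via integer division, takes the most common bin index, and rescales back to the data units. A commented restatement of the same arithmetic (a sketch; quantized_mode is a hypothetical name):

def quantized_mode(x, n_bins=50):
    # Bin width covering the full data range.
    width = ((x.max() - x.min()) / n_bins).round(decimals=2)
    # Integer division assigns each value a bin index; the most frequent
    # index, rescaled by the width, approximates the histogram mode.
    idx = (x.round(2) // width).reshape(-1)
    vals, counts = np.unique(idx, return_counts=True)
    return vals[np.argmax(counts)] * width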
In [618]:
mode_1d(x), mode_2d(x, 50)
Out[618]:
(-2.06, -19.559437225171564)
In [607]:
mode(x.round(0).reshape(-1))
Out[607]:
ModeResult(mode=array([113.]), count=array([1474]))
In [616]:
freq, val, *_ = plt.hist(x.reshape(-1), bins=50)
val[np.argmax(freq)]
Out[616]:
80.34676840911493
In [617]:
freq, val, *_ = np.histogram(x.reshape(-1), bins=50)
val[np.argmax(freq)]
Out[617]:
80.34676840911493
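The last two estimates agree exactly because plt.hist delegates its binning to np.histogram; only the return value differs (plt.hist also returns the drawn patches). A quick consistency check:

f1, v1, *_ = plt.hist(x.reshape(-1), bins=50)
f2, v2 = np.histogram(x.reshape(-1), bins=50)
assert np.allclose(f1, f2) and np.allclose(v1, v2)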
In [1]:
%load_ext autoreload
%autoreload 2
from imports import *
files = glob.glob("/data/additive_project/data/experiment_03/*info")
In [2]:
# client = Client(scheduler_file="/data/additive_project/jupyter/projects/schedfile")
In [3]:
from additive.feature_functions import feature_functions_functions as feat_funs
def image_stats(x, feat_funs):
    # One delayed task per feature function, evaluated in a single compute call.
    return compute({k: delayed(v)(x) for k, v in feat_funs.items()})[0]
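image_stats builds one delayed task per feature function and evaluates them in a single compute call, so dask can schedule them together over the shared input array. A self-contained toy version of the same pattern (toy_funs is illustrative, not the project's feat_funs):

import numpy as np
from dask import compute, delayed

toy_funs = {"mean": np.mean, "std": np.std, "ptp": np.ptp}
img = np.random.rand(100, 100)
stats, = compute({k: delayed(f)(img) for k, f in toy_funs.items()})
# stats is a plain dict, e.g. {'mean': 0.50..., 'std': 0.29..., 'ptp': 0.99...}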
In [4]:
from additive.preprocessing import correct_aligment, process_image

def read_file_and_get_stats(file):
    print(file)
    data = joblib.load(file)
    # Some files store the surface under data['value'].x; others are raw arrays.
    try:
        data = np.array(data['value'].x)
    except (TypeError, KeyError, AttributeError):
        data = np.array(data)
    # Trim a 500-pixel border to avoid edge artifacts.
    image = data[500:-500, 500:-500]
    # image = process_image(image,)
    return image_stats(image, feat_funs)
In [5]:
with Pool(2) as p:
    with dask.config.set(pool=p):
        res = bag.from_sequence(files).map(read_file_and_get_stats).compute()
/data/additive_project/data/experiment_03/Polished_V09_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T1_R_3d.info
/data/additive_project/data/experiment_03/V04_T1_L_3d.info
/data/additive_project/data/experiment_03/V14_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V07_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V03_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V13_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V07_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V04_T1_R_3d.info
/data/additive_project/data/experiment_03/V03_T1_L_3d.info
/data/additive_project/data/experiment_03/Polished_V07_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V01_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V01_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V13_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V08_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V07_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V05_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V03_T1_R_3d.info
/data/additive_project/data/experiment_03/V17_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V05_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V03_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V17_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V10_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V12_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V17_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V05_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V10_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V07_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V03_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V07_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T1_R_3d.info
/data/additive_project/data/experiment_03/V03_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V15_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V03_T2_L_3d.info
/data/additive_project/data/experiment_03/V16_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V09_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T1_Right(Top)_500X_3D_2.info
/data/additive_project/data/experiment_03/V06_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V03_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V13_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V11_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V02_T2_L_3d.info
/data/additive_project/data/experiment_03/Polished_V09_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T1_L_3d.info
/data/additive_project/data/experiment_03/V15_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V17_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V11_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T2_R_3d.info
/data/additive_project/data/experiment_03/V11_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V01_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V15_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V15_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V16_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V01_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V08_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V18_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T1_Left(Bottom)_500X_3D_2.info
/data/additive_project/data/experiment_03/V02_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V08_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V03_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V09_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V01_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V11_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V13_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V09_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V07_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V16_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V15_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V04_T2_R_3d.info
/data/additive_project/data/experiment_03/V04_T2_L_3d.info
/data/additive_project/data/experiment_03/V17_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V01_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V03_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V13_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V02_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T2_R_3d.info
/data/additive_project/data/experiment_03/V07_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V15_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V03_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V02_T2_R_3d.info
/data/additive_project/data/experiment_03/V16_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V12_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V14_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V11_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V04_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V02_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V05_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V14_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V02_T1_L_3d.info
/data/additive_project/data/experiment_03/V12_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V15_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V18_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V11_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V10_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V13_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V01_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T2_Right(Top)_500X_3D_2.info
/data/additive_project/data/experiment_03/V02_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V04_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V04_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V09_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V10_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T2_L_3d.info
/data/additive_project/data/experiment_03/V17_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V02_T1_R_3d.info
/data/additive_project/data/experiment_03/V17_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V12_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V08_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V03_T2_R_3d.info
/data/additive_project/data/experiment_03/V14_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V13_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V09_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V09_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V06_T2_L_3d.info
/data/additive_project/data/experiment_03/Polished_V15_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V19_T2_Left(Bottom)_500X_3D_2.info
/data/additive_project/data/experiment_03/V05_T1_L_3d.info
/data/additive_project/data/experiment_03/V13_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V18_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V05_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V11_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V18_T2_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V01_T1_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/Polished_V17_T1_Right(Top)_500X_3D.info
/data/additive_project/data/experiment_03/V04_T2_Left(Bottom)_500X_3D.info
/data/additive_project/data/experiment_03/V11_T2_Right(Top)_500X_3D.info
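For reference, In [5] maps a function over the file list with dask.bag while routing execution through an explicit multiprocessing pool. A minimal standalone version of the same pattern (toy function; pool size illustrative):

from multiprocessing import Pool

import dask
import dask.bag as bag

def square(n):
    return n * n

with Pool(2) as p:
    with dask.config.set(pool=p):
        # The bag's default multiprocessing scheduler picks the pool up
        # from the dask config, as in the cell above.
        results = bag.from_sequence(range(10)).map(square).compute()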
In [9]:
out = pd.DataFrame(res).assign(file=files)
In [11]:
from additive.utility import *
In [13]:
get_file_info(out, 'file').to_csv("../data/paper/data/global_stats_v03.csv", index=False)
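get_file_info presumably parses the metadata encoded in each filename (the Polished_ prefix, specimen V##, tile T#, and Left/Right side that feed the ispolished/specimen grouping columns earlier). A hypothetical regex-based parser for this naming scheme, as a sketch only:

import re

def parse_filename(path):
    # Hypothetical parser for names like 'Polished_V13_T2_Right(Top)_500X_3D.info'
    # and the short form 'V06_T1_R_3d.info'.
    name = path.rsplit("/", 1)[-1]
    m = re.match(r"(Polished_)?V(\d+)_T(\d+)_(Left|Right|L|R)", name)
    if m is None:
        return None
    polished, specimen, tile, side = m.groups()
    return {"ispolished": polished is not None,
            "specimen": int(specimen),
            "tile": int(tile),
            "side": side[0]}  # normalize 'Left'/'L' -> 'L'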
In [ ]: